コード例 #1
0
ファイル: stratum.py プロジェクト: lsudduth/p2pool-rav
 def _send_work(self):
     """Fetch new work from the work broker and push it to the connected
     miner as a stratum job.

     Sends mining.set_difficulty followed by mining.notify.  clean_jobs is
     only True when the previous block changed, so the miner keeps working
     on outstanding jobs across ordinary share updates.  If fetching work
     fails, the error is logged and the connection is dropped.
     """
     try:
         x, got_response = self.wb.get_work(*self.wb.preprocess_request('' if self.username is None else self.username))
     except Exception:
         # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
         # and other BaseExceptions are not swallowed here
         log.err()
         self.transport.loseConnection()
         return
     jobid = str(random.randrange(2**128))
     clean_jobs = False
     if x['previous_block'] != self.previous_block:
         # new block: tell the miner to abandon stale jobs
         clean_jobs = True
         self.previous_block = x['previous_block']
     # update miner difficulty before announcing the job; errors are ignored
     self.other.svc_mining.rpc_set_difficulty(bitcoin_data.target_to_difficulty(x['share_target'])*self.wb.net.DUMB_SCRYPT_DIFF).addErrback(lambda err: None)
     self.other.svc_mining.rpc_notify(
         jobid, # jobid
         getwork._swap4(pack.IntType(256).pack(x['previous_block'])).encode('hex'), # prevhash
         x['coinb1'].encode('hex'), # coinb1
         x['coinb2'].encode('hex'), # coinb2
         [pack.IntType(256).pack(s).encode('hex') for s in x['merkle_link']['branch']], # merkle_branch
         getwork._swap4(pack.IntType(32).pack(x['version'])).encode('hex'), # version
         getwork._swap4(pack.IntType(32).pack(x['bits'].bits)).encode('hex'), # nbits
         getwork._swap4(pack.IntType(32).pack(x['timestamp'])).encode('hex'), # ntime
         clean_jobs, # clean_jobs
     ).addErrback(lambda err: None)
     # remember the job so rpc_submit can match returned work by jobid
     self.handler_map[jobid] = x, got_response
コード例 #2
0
 def _send_work(self):
     """Fetch new work from the work broker and push it to the miner via
     stratum mining.set_difficulty + mining.notify.

     clean_jobs is always sent as True here, so the miner drops any
     outstanding jobs whenever new work arrives.  If fetching work fails,
     the error is logged and the connection is dropped.
     """
     try:
         x, got_response = self.wb.get_work(*self.wb.preprocess_request(
             '' if self.username is None else self.username))
     except Exception:
         # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
         # and other BaseExceptions are not swallowed here
         log.err()
         self.transport.loseConnection()
         return
     jobid = str(random.randrange(2**128))
     # update miner difficulty before announcing the job; errors are ignored
     self.other.svc_mining.rpc_set_difficulty(
         bitcoin_data.target_to_difficulty(x['share_target']) *
         self.wb.net.DUMB_SCRYPT_DIFF).addErrback(lambda err: None)
     self.other.svc_mining.rpc_notify(
         jobid,  # jobid
         getwork._swap4(pack.IntType(256).pack(
             x['previous_block'])).encode('hex'),  # prevhash
         x['coinb1'].encode('hex'),  # coinb1
         x['coinb2'].encode('hex'),  # coinb2
         [
             pack.IntType(256).pack(s).encode('hex')
             for s in x['merkle_link']['branch']
         ],  # merkle_branch
         getwork._swap4(pack.IntType(32).pack(
             x['version'])).encode('hex'),  # version
         getwork._swap4(pack.IntType(32).pack(
             x['bits'].bits)).encode('hex'),  # nbits
         getwork._swap4(pack.IntType(32).pack(
             x['timestamp'])).encode('hex'),  # ntime
         True,  # clean_jobs
     ).addErrback(lambda err: None)
     # remember the job so rpc_submit can match returned work by jobid
     self.handler_map[jobid] = x, got_response
コード例 #3
0
ファイル: run.py プロジェクト: theassyrian/p2pool.info2
 def handle_block(block_data):
     """Inspect an incoming block and, if it looks like a p2pool-generated
     block, record it in the module-level `blocks` list / `blocks_dict` map.

     A block is recognized as p2pool's by the shape of its coinbase
     transaction outputs: many payout outputs, a donation output, and a
     final zero-value OP_RETURN output carrying p2pool metadata.
     """
     block = block_data['block']
     
     # coinbase (generation) transaction outputs
     txouts = block['txs'][0]['tx_outs']
     
     # heuristics identifying a p2pool coinbase:
     if len(txouts) < 25: return
     # last output must be an OP_RETURN (0x6a) data carrier ...
     if not txouts[-1]['script'].startswith('\x6a'): return
     # ... long enough to hold the embedded payload
     if len(txouts[-1]['script']) < 33: return
     # ... and carry no value
     if txouts[-1]['value'] != 0: return
     # second-to-last output must pay the p2pool donation script
     if txouts[-2]['script'] != p2pool_data.DONATION_SCRIPT: return
     
     # block hash as a 64-digit zero-padded hex string
     hash_str = '%064x' % bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
     print hash_str
     
     # only insert blocks we haven't seen before
     if hash_str not in blocks_dict:
         print 'inserted'
         x = dict(
             Id=hash_str,
             PrevBlock='%064x' % block['header']['previous_block'],
             GenerationTxHash='%064x' % block_data['gentx_hash'],
             BlockHeight=block_data['height'],
             Difficulty=bitcoin_data.target_to_difficulty(block['header']['bits'].target),
             Timestamp=block['header']['timestamp'],
             IsOrphaned=None, # XXX orphan status not determined here
         )
         blocks.append(x)
         blocks_dict[hash_str] = x
コード例 #4
0
ファイル: stratum.py プロジェクト: Neels99/p2pool
 def _send_work(self):
     """Fetch new work and push it to the miner as a stratum job,
     maintaining self.target along the way.

     If desired_pseudoshare_target is set (non-None, non-zero) the target
     is fixed from it (but never harder than the block target); otherwise
     the target adapts from the share target.  clean_jobs is always True.
     Drops the connection if fetching work fails.
     """
     try:
         x, got_response = self.wb.get_work(*self.wb.preprocess_request('' if self.username is None else self.username))
     except Exception:
         # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
         # and other BaseExceptions are not swallowed here
         log.err()
         self.transport.loseConnection()
         return
     if self.desired_pseudoshare_target: # Not none or zero, if we calculate better Difficulty setting?
         self.fixed_target = True
         # set new target for difficulty, based on time for it's generation?
         self.target = self.desired_pseudoshare_target
         # never set a target harder than the actual block target
         self.target = max(self.target, int(x['bits'].target))
     else:
         self.fixed_target = False
         # 'is None' (identity) instead of '== None' (equality) per PEP 8
         self.target = x['share_target'] if self.target is None else max(x['min_share_target'], self.target)
     jobid = str(random.randrange(2**128))
     # update miner difficulty before announcing the job; errors are ignored
     self.other.svc_mining.rpc_set_difficulty(bitcoin_data.target_to_difficulty(self.target)*self.wb.net.DUMB_SCRYPT_DIFF).addErrback(lambda err: None)
     self.other.svc_mining.rpc_notify(
         jobid, # jobid
         getwork._swap4(pack.IntType(256).pack(x['previous_block'])).encode('hex'), # prevhash
         x['coinb1'].encode('hex'), # coinb1
         x['coinb2'].encode('hex'), # coinb2
         [pack.IntType(256).pack(s).encode('hex') for s in x['merkle_link']['branch']], # merkle_branch
         getwork._swap4(pack.IntType(32).pack(x['version'])).encode('hex'), # version
         getwork._swap4(pack.IntType(32).pack(x['bits'].bits)).encode('hex'), # nbits
         getwork._swap4(pack.IntType(32).pack(x['timestamp'])).encode('hex'), # ntime
         True, # clean_jobs
     ).addErrback(lambda err: None)
     # remember the job so rpc_submit can match returned work by jobid
     self.handler_map[jobid] = x, got_response
コード例 #5
0
ファイル: stratum.py プロジェクト: Neels99/p2pool
    def rpc_submit(self, worker_name, job_id, extranonce2, ntime, nonce, version_bits = None, *args):
        """Handle a stratum mining.submit from the miner.

        Reconstructs the block header from the job stored in
        self.handler_map, validates an optional AsicBoost version mask,
        hands the result to the job's got_response callback, and then
        adaptively retargets this connection's difficulty.

        Returns the callback's result, or False if the job id is unknown.
        Raises ValueError if the miner changed version bits outside the
        pool's version mask.
        """
        #asicboost: version_bits is the version mask that the miner used
        worker_name = worker_name.strip()
        if job_id not in self.handler_map:
            print >>sys.stderr, '''Couldn't link returned work's job id with its handler. This should only happen if this process was recently restarted!'''
            #self.other.svc_client.rpc_reconnect().addErrback(lambda err: None)
            return False
        x, got_response = self.handler_map[job_id]
        # extranonce2 is the miner-chosen coinbase nonce, hex-encoded
        coinb_nonce = extranonce2.decode('hex')
        assert len(coinb_nonce) == self.wb.COINBASE_NONCE_LENGTH
        # splice the nonce between the two coinbase halves from the job
        new_packed_gentx = x['coinb1'] + coinb_nonce + x['coinb2']

        job_version = x['version']
        nversion = job_version
        #check if miner changed bits that they were not supposed to change
        if version_bits:
            if ((~self.pool_version_mask) & int(version_bits,16)) != 0:
                #todo: how to raise error back to miner?
                #protocol does not say error needs to be returned but ckpool returns
                #{"error": "Invalid version mask", "id": "id", "result":""}
                raise ValueError("Invalid version mask {0}".format(version_bits))
            # merge the miner's rolled bits (inside the mask) into the job version
            nversion = (job_version & ~self.pool_version_mask) | (int(version_bits,16) & self.pool_version_mask)
            #nversion = nversion & int(version_bits,16)

        # rebuild the full header; ntime/nonce arrive byte-swapped on the wire
        header = dict(
            version=nversion,
            previous_block=x['previous_block'],
            merkle_root=bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), x['merkle_link']), # new_packed_gentx has witness data stripped
            timestamp=pack.IntType(32).unpack(getwork._swap4(ntime.decode('hex'))),
            bits=x['bits'],
            nonce=pack.IntType(32).unpack(getwork._swap4(nonce.decode('hex'))),
        )
        result = got_response(header, worker_name, coinb_nonce, self.target) # self.target from RPC response

        # adjust difficulty on this stratum to target ~10sec/pseudoshare!!!
        if not self.fixed_target:
            self.recent_shares.append(time.time())
            # retarget once enough shares accumulated, or enough time passed
            if len(self.recent_shares) > 12 or (time.time() - self.recent_shares[0]) > 10*len(self.recent_shares)*self.share_rate:
                old_time = self.recent_shares[0]
                del self.recent_shares[0]
                olddiff = bitcoin_data.target_to_difficulty(self.target)
                # calculate new target based on time needed for generating previous shares
                # (scale factor clipped to [0.5, 2] to avoid wild swings)
                self.target = int(self.target * clip((time.time() - old_time)/(len(self.recent_shares)*self.share_rate), 0.5, 2.) + 0.5)
                newtarget = clip(self.target, self.wb.net.SANE_TARGET_RANGE[0], self.wb.net.SANE_TARGET_RANGE[1])
                if newtarget != self.target:
                    print "Clipping target from %064x to %064x" % (self.target, newtarget)
                    self.target = newtarget
                # never go below the minimum share target of the current job
                self.target = max(x['min_share_target'], self.target)
                self.recent_shares = [time.time()]
                # push a fresh job reflecting the new difficulty
                self._send_work()

        return result
コード例 #6
0
ファイル: stratum.py プロジェクト: 9cat/yacoin-p2pool
 def _send_work(self):
     """Fetch new work from the work broker and announce it to the miner
     as a fresh stratum job (set_difficulty followed by notify)."""
     user = '' if self.username is None else self.username
     work, got_response = self.wb.get_work(*self.wb.preprocess_request(user))
     job_id = str(random.randrange(2**128))
     difficulty = bitcoin_data.target_to_difficulty(work['share_target'])
     self.other.svc_mining.rpc_set_difficulty(difficulty).addErrback(lambda err: None)

     def hex32(value):
         # byte-swapped 32-bit field, hex-encoded for the stratum wire format
         return getwork._swap4(pack.IntType(32).pack(value)).encode('hex')

     merkle_branch = [pack.IntType(256).pack(h).encode('hex') for h in work['merkle_link']['branch']]
     self.other.svc_mining.rpc_notify(
         job_id, # jobid
         getwork._swap4(pack.IntType(256).pack(work['previous_block'])).encode('hex'), # prevhash
         work['coinb1'].encode('hex'), # coinb1
         work['coinb2'].encode('hex'), # coinb2
         merkle_branch, # merkle_branch
         hex32(work['version']), # version
         hex32(work['bits'].bits), # nbits
         hex32(work['timestamp']), # ntime
         True, # clean_jobs: always tell the miner to drop outstanding jobs
     ).addErrback(lambda err: None)
     # keep the job around so a later submit can be matched back to it
     self.handler_map[job_id] = work, got_response
コード例 #7
0
 def think(self, block_rel_height_func, previous_block, bits, known_txs):
     """Re-evaluate the share tree: verify pending heads, prune bad ones,
     pick the best verified chain head, and collect shares that should be
     requested from peers.

     Returns (best_share_hash, [(peer_addr, share_hash), ...], decorated_heads)
     where the middle element lists shares still desired from peers.
     """
     desired = set()
     
     # O(len(self.heads))
     #   make 'unverified heads' set?
     # for each overall head, attempt verification
     # if it fails, attempt on parent, and repeat
     # if no successful verification because of lack of parents, request parent
     bads = set()
     for head in set(self.heads) - set(self.verified.heads):
         head_height, last = self.get_height_and_last(head)
         
         for share in self.get_chain(head, head_height if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
             if self.attempt_verify(share):
                 break
             if share.hash in self.heads:
                 bads.add(share.hash)
         else:
             # no share on this head verified; request its missing parent
             if last is not None:
                 desired.add((
                     self.items[random.choice(list(self.reverse[last]))].peer_addr,
                     last,
                     max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
                     min(x.target for x in self.get_chain(head, min(head_height, 5))),
                 ))
     for bad in bads:
         assert bad not in self.verified.items
         assert bad in self.heads
         if p2pool.DEBUG:
             print "BAD", bad
         self.remove(bad)
     
     # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
     for head in list(self.verified.heads):
         head_height, last_hash = self.verified.get_height_and_last(head)
         last_height, last_last_hash = self.get_height_and_last(last_hash)
         # XXX review boundary conditions
         want = max(self.net.CHAIN_LENGTH - head_height, 0)
         can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
         get = min(want, can)
         #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
         for share in self.get_chain(last_hash, get):
             if not self.attempt_verify(share):
                 break
         if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
             desired.add((
                 self.items[random.choice(list(self.verified.reverse[last_hash]))].peer_addr,
                 last_last_hash,
                 max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
                 min(x.target for x in self.get_chain(head, min(head_height, 5))),
             ))
     
     # decide best tree
     decorated_tails = sorted((self.score(max(self.verified.tails[tail_hash], key=self.verified.get_work), block_rel_height_func), tail_hash) for tail_hash in self.verified.tails)
     if p2pool.DEBUG:
         print len(decorated_tails), 'tails:'
         for score, tail_hash in decorated_tails:
             print format_hash(tail_hash), score
     best_tail_score, best_tail = decorated_tails[-1] if decorated_tails else (None, None)
     
     # decide best verified head
     # score: (work of 5th-parent, no punishment preferred, earliest seen preferred)
     decorated_heads = sorted(((
         self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
         #self.items[h].peer_addr is None,
         -self.items[h].should_punish_reason(previous_block, bits, self, known_txs)[0],
         -self.items[h].time_seen,
     ), h) for h in self.verified.tails.get(best_tail, []))
     if p2pool.DEBUG:
         print len(decorated_heads), 'heads. Top 10:'
         for score, head_hash in decorated_heads[-10:]:
             print '   ', format_hash(head_hash), format_hash(self.items[head_hash].previous_hash), score
     best_head_score, best = decorated_heads[-1] if decorated_heads else (None, None)
     
     if best is not None:
         best_share = self.items[best]
         punish, punish_reason = best_share.should_punish_reason(previous_block, bits, self, known_txs)
         if punish > 0:
             # step back one share to punish a misbehaving best head
             print 'Punishing share for %r! Jumping from %s to %s!' % (punish_reason, format_hash(best), format_hash(best_share.previous_hash))
             best = best_share.previous_hash
         
         timestamp_cutoff = min(int(time.time()), best_share.timestamp) - 3600
         target_cutoff = int(2**256//(self.net.SHARE_PERIOD*best_tail_score[1] + 1) * 2 + .5) if best_tail_score[1] is not None else 2**256-1
     else:
         timestamp_cutoff = int(time.time()) - 24*60*60
         target_cutoff = 2**256-1
     
     if p2pool.DEBUG:
         print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (len(desired), math.format_dt(time.time() - timestamp_cutoff), bitcoin_data.target_to_difficulty(target_cutoff))
         for peer_addr, hash, ts, targ in desired:
             print '   ', None if peer_addr is None else '%s:%i' % peer_addr, format_hash(hash), math.format_dt(time.time() - ts), bitcoin_data.target_to_difficulty(targ), ts >= timestamp_cutoff, targ <= target_cutoff
     
     # NOTE(review): only the timestamp cutoff filters the returned desired
     # list; target_cutoff is computed/printed but not applied here -- confirm
     # this is intentional (older variants also filtered on targ <= target_cutoff).
     return best, [(peer_addr, hash) for peer_addr, hash, ts, targ in desired if ts >= timestamp_cutoff], decorated_heads
コード例 #8
0
ファイル: data.py プロジェクト: GTRsdk/p2pool
 def think(self, block_rel_height_func, previous_block, bits):
     """Re-evaluate the share tree: verify pending heads, prune stale heads
     and tails, and choose the best verified chain head.

     Returns (best_share_hash, [(peer, share_hash), ...]) where the list
     names shares still desired from peers, filtered by timestamp and
     target cutoffs.
     """
     desired = set()
     
     # O(len(self.heads))
     #   make 'unverified heads' set?
     # for each overall head, attempt verification
     # if it fails, attempt on parent, and repeat
     # if no successful verification because of lack of parents, request parent
     bads = set()
     for head in set(self.heads) - set(self.verified.heads):
         head_height, last = self.get_height_and_last(head)
         
         for share in self.get_chain(head, head_height if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
             if self.attempt_verify(share):
                 break
             if share.hash in self.heads:
                 bads.add(share.hash)
         else:
             # no share on this head verified; request its missing parent
             if last is not None:
                 desired.add((
                     self.shares[random.choice(list(self.reverse_shares[last]))].peer,
                     last,
                     max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
                     min(x.target for x in self.get_chain(head, min(head_height, 5))),
                 ))
     for bad in bads:
         assert bad not in self.verified.shares
         assert bad in self.heads
         if p2pool.DEBUG:
             print "BAD", bad
         self.remove(bad)
     
     # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
     for head in list(self.verified.heads):
         head_height, last_hash = self.verified.get_height_and_last(head)
         last_height, last_last_hash = self.get_height_and_last(last_hash)
         # XXX review boundary conditions
         want = max(self.net.CHAIN_LENGTH - head_height, 0)
         can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
         get = min(want, can)
         #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
         for share in self.get_chain(last_hash, get):
             if not self.attempt_verify(share):
                 break
         if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
             desired.add((
                 self.shares[random.choice(list(self.verified.reverse_shares[last_hash]))].peer,
                 last_last_hash,
                 max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
                 min(x.target for x in self.get_chain(head, min(head_height, 5))),
             ))
     
     # decide best tree
     decorated_tails = sorted((self.score(max(self.verified.tails[tail_hash], key=self.verified.get_work), block_rel_height_func), tail_hash) for tail_hash in self.verified.tails)
     if p2pool.DEBUG:
         print len(decorated_tails), 'tails:'
         for score, tail_hash in decorated_tails:
             print format_hash(tail_hash), score
     best_tail_score, best_tail = decorated_tails[-1] if decorated_tails else (None, None)
     
     # decide best verified head
     # score: (work of 5th-parent, is a block solution, matches current
     # chain tip or is our own, earliest seen preferred)
     decorated_heads = sorted(((
         self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
         #self.shares[h].peer is None,
         self.shares[h].pow_hash <= self.shares[h].header['bits'].target, # is block solution
         (self.shares[h].header['previous_block'], self.shares[h].header['bits']) == (previous_block, bits) or self.shares[h].peer is None,
         -self.shares[h].time_seen,
     ), h) for h in self.verified.tails.get(best_tail, []))
     if p2pool.DEBUG:
         print len(decorated_heads), 'heads. Top 10:'
         for score, head_hash in decorated_heads[-10:]:
             print '   ', format_hash(head_hash), format_hash(self.shares[head_hash].previous_hash), score
     best_head_score, best = decorated_heads[-1] if decorated_heads else (None, None)
     
     # eat away at heads
     # (iteratively remove old, non-top heads; capped at 1000 passes)
     if decorated_heads:
         for i in xrange(1000):
             to_remove = set()
             for share_hash, tail in self.heads.iteritems():
                 if share_hash in [head_hash for score, head_hash in decorated_heads[-5:]]:
                     #print 1
                     continue
                 if self.shares[share_hash].time_seen > time.time() - 300:
                     #print 2
                     continue
                 if share_hash not in self.verified.shares and max(self.shares[after_tail_hash].time_seen for after_tail_hash in self.reverse_shares.get(tail)) > time.time() - 120: # XXX stupid
                     #print 3
                     continue
                 to_remove.add(share_hash)
             if not to_remove:
                 break
             for share_hash in to_remove:
                 if share_hash in self.verified.shares:
                     self.verified.remove(share_hash)
                 self.remove(share_hash)
             #print "_________", to_remove
     
     # drop tails
     # (trim the oldest shares once every head is comfortably past CHAIN_LENGTH)
     for i in xrange(1000):
         to_remove = set()
         for tail, heads in self.tails.iteritems():
             if min(self.get_height(head) for head in heads) < 2*self.net.CHAIN_LENGTH + 10:
                 continue
             for aftertail in self.reverse_shares.get(tail, set()):
                 if len(self.reverse_shares[self.shares[aftertail].previous_hash]) > 1: # XXX
                     print "raw"
                     continue
                 to_remove.add(aftertail)
         if not to_remove:
             break
         # if removed from this, it must be removed from verified
         #start = time.time()
         for aftertail in to_remove:
             if self.shares[aftertail].previous_hash not in self.tails:
                 print "erk", aftertail, self.shares[aftertail].previous_hash
                 continue
             if aftertail in self.verified.shares:
                 self.verified.remove(aftertail)
             self.remove(aftertail)
         #end = time.time()
         #print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))
     
     if best is not None:
         best_share = self.shares[best]
         # punish a best head built on a stale block by stepping back one share
         if (best_share.header['previous_block'], best_share.header['bits']) != (previous_block, bits) and best_share.header_hash != previous_block and best_share.peer is not None:
             if p2pool.DEBUG:
                 print 'Stale detected! %x < %x' % (best_share.header['previous_block'], previous_block)
             best = best_share.previous_hash
         
         timestamp_cutoff = min(int(time.time()), best_share.timestamp) - 3600
         target_cutoff = 2**256//(self.net.SHARE_PERIOD*best_tail_score[1] + 1) * 2 if best_tail_score[1] is not None else 2**256-1
     else:
         timestamp_cutoff = int(time.time()) - 24*60*60
         target_cutoff = 2**256-1
     
     if p2pool.DEBUG:
         print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (len(desired), math.format_dt(time.time() - timestamp_cutoff), bitcoin_data.target_to_difficulty(target_cutoff))
         for peer, hash, ts, targ in desired:
             print '   ', '%s:%i' % peer.addr if peer is not None else None, format_hash(hash), math.format_dt(time.time() - ts), bitcoin_data.target_to_difficulty(targ), ts >= timestamp_cutoff, targ <= target_cutoff
     
     return best, [(peer, hash) for peer, hash, ts, targ in desired if ts >= timestamp_cutoff and targ <= target_cutoff]
コード例 #9
0
    def generate_transaction(cls,
                             tracker,
                             share_data,
                             block_target,
                             desired_timestamp,
                             desired_target,
                             ref_merkle_link,
                             desired_other_transaction_hashes_and_fees,
                             net,
                             known_txs=None,
                             last_txout_nonce=0,
                             base_subsidy=None):
        """Build the generation (coinbase) transaction and share metadata
        for a new share.

        Returns (share_info, gentx, other_transaction_hashes, get_share),
        where get_share(header) constructs the actual share object once a
        matching block header is found.

        NOTE(review): relies on module-level 'minout' (minimum outpoint
        value) and DONATION_SCRIPT; outputs smaller than 'minout' (other
        than the current user's and the donation output) are dropped and
        their weight redistributed -- confirm against the rest of the module.
        """

        # previous share in the chain, or None for the very first share
        previous_share = tracker.items[
            share_data['previous_share_hash']] if share_data[
                'previous_share_hash'] is not None else None

        def get_coinbase_fee(share_data, outpointsnum):
            # calculate neccessary coinbase fee

            # coinbase usually seems like this:
            #
            # 01000000 - nVersion
            # 1a184351 - nTimestamp

            # 01 - Inputs num
            # 0000000000000000000000000000000000000000000000000000000000000000 - Input hash
            # ffffffff - Input index (-1)
            # 0a02732a062f503253482f - Scriptsig
            # ffffffff - nSequence

            # 15 - Outpoints num
            # (User outpoints, 44 bytes per each)
            # (Donation outpoint, 76 bytes)

            # P2Pool service outpoint (contains merkle link), 46 bytes
            #
            # 1027000000000000
            # 25
            # 2417cc2063b11fd5255c7e5605780de78163ffc698ed22856bff1a5d880c3c44e400000000

            # Giving users some time to upgrade
            coinbase_size = 50 + (
                1 + len(share_data['coinbase'])) + outpointsnum * 44 + 76 + 46

            # if coinbase size is greater than 1000 bytes, it should pay fee (0.01 per 1000 bytes)
            if coinbase_size > 1000:
                return int(ceil(coinbase_size / 1000.0) * minout)

            return 0

        if base_subsidy is None:
            base_subsidy = net.PARENT.SUBSIDY_FUNC(block_target)

        # current user payout script

        this_script = share_data['script']

        height, last = tracker.get_height_and_last(
            share_data['previous_share_hash'])

        assert height >= net.REAL_CHAIN_LENGTH or last is None
        # difficulty retargeting: aim for one share per SHARE_PERIOD,
        # clipped to +/-10% of the previous share's max_target and to the
        # network's sane target range
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(
                tracker,
                share_data['previous_share_hash'],
                net.TARGET_LOOKBEHIND,
                min_work=True,
                integer=True)
            pre_target = 2**256 // (
                net.SHARE_PERIOD *
                attempts_per_second) - 1 if attempts_per_second else 2**256 - 1
            pre_target2 = math.clip(pre_target,
                                    (previous_share.max_target * 9 // 10,
                                     previous_share.max_target * 11 // 10))
            pre_target3 = math.clip(pre_target2,
                                    (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(
            math.clip(desired_target, (pre_target3 // 10, pre_target3)))

        if p2pool.DEBUG:
            print
            print "Share Info Bits Target (DATA)"
            print bitcoin_data.target_to_difficulty(bits.target)
            print

        # deduplicate transactions against those announced by recent shares:
        # known ones are referenced as [share_count, tx_count] pairs instead
        # of repeating their hashes
        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []

        past_shares = list(
            tracker.get_chain(share_data['previous_share_hash'],
                              min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1 + i,
                                                j]  # share_count, tx_count
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = bitcoin_data.tx_type.packed_size(
                        known_txs[tx_hash])
                    if new_transaction_size + this_size > 50000:  # only allow 50 kB of new txns/share
                        break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes) - 1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        included_transactions = set(other_transaction_hashes)

        share_data = dict(share_data, subsidy=base_subsidy)

        raw_weights, total_weight, donation_weight = tracker.get_cumulative_weights(
            share_data['previous_share_hash'],
            min(height, net.REAL_CHAIN_LENGTH),
            65535 * net.SPREAD *
            bitcoin_data.target_to_average_attempts(block_target),
        )

        # calculate "raw" subsidy
        raw_subsidy = share_data['subsidy'] - 4 * minout - get_coinbase_fee(
            share_data,
            len(raw_weights) + 1)

        # calculate "raw" amounts
        raw_amounts = dict((script, raw_subsidy * weight // total_weight)
                           for script, weight in raw_weights.iteritems())

        total_remowed_weight = 0
        weights = {}

        # iterate list and collect all weights, which produces less than 0.01 payout
        # it's neccessary due to NVC/PPC protocol-level limitations for coinbase outpoint size
        for x in raw_amounts.keys():
            if raw_amounts[x] < minout and x not in [
                    this_script, DONATION_SCRIPT
            ]:
                total_remowed_weight = total_remowed_weight + raw_weights[x]
            else:
                weights[x] = raw_weights[x]

        total_weight = total_weight - total_remowed_weight
        assert total_weight == sum(weights.itervalues()) + donation_weight, (
            total_weight, sum(weights.itervalues()) + donation_weight)

        # base subsidy value calculated as:
        # [subsidy - (0.01 for donation + 0.01 for current user + 0.01 for p2pool outpoint) - netfee]
        my_subsidy = share_data['subsidy'] - 3 * minout - get_coinbase_fee(
            share_data,
            len(weights) + 1)

        # subsidy goes according to weights prior to this share
        amounts = dict((script, my_subsidy * weight // total_weight)
                       for script, weight in weights.iteritems())

        # all that's left over is the donation weight and some extra satoshis due to rounding
        amounts[DONATION_SCRIPT] = amounts.get(
            DONATION_SCRIPT, 0) + my_subsidy - sum(amounts.itervalues())

        # sanity check: amounts must exactly sum to the subsidy, none negative
        if sum(amounts.itervalues()) != my_subsidy or any(
                x < 0 for x in amounts.itervalues()):
            raise ValueError()

        # add 0.01 coin to donation, to satisfy the protocol
        amounts[DONATION_SCRIPT] = amounts[DONATION_SCRIPT] + minout

        # add 0.01 to current user output, to satisfy the protocol
        amounts[this_script] = amounts.get(this_script, 0) + minout

        dests = sorted(
            amounts.iterkeys(),
            key=lambda script:
            (script == DONATION_SCRIPT, amounts[script], script))[
                -4000:]  # block length limit, unlikely to ever be hi

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else
            tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=desired_timestamp,  # need better solution
            #           timestamp=math.clip(desired_timestamp, (
            #               (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
            #               (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
            #           )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
        )

        if p2pool.DEBUG:
            print
            print "Desired timestamp (DATA)"
            print desired_timestamp
            print time.time()
            print
            print "Prev Share timestamp (DATA)"
            print previous_share.timestamp
            print time.time()
            print
            print "Share info timestamp (DATA)"
            print share_info['timestamp']
            print time.time()
            print

        gentx = dict(
            version=1,
            # coinbase timestamp must be older than share/block timestamp
            # maybe there are more elegant solution, but this hack works quite well for now
            timestamp=share_info['timestamp'],
            tx_ins=[
                dict(
                    previous_output=None,
                    sequence=None,
                    script=share_data['coinbase'],
                )
            ],
            tx_outs=[
                dict(value=amounts[script], script=script) for script in dests
                if amounts[script] or script == DONATION_SCRIPT
            ] + [
                dict(
                    # add 0.01 coin to service output, to satisfy the protocol
                    value=minout,
                    script='\x24' +
                    cls.get_ref_hash(net, share_info, ref_merkle_link) +
                    pack.IntType(32).pack(last_txout_nonce),
                )
            ],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            # construct the share object once a solving header is known
            min_header = dict(header)
            del min_header['merkle_root']
            share = cls(
                net, None,
                dict(
                    min_header=min_header,
                    share_info=share_info,
                    ref_merkle_link=dict(branch=[], index=0),
                    last_txout_nonce=last_txout_nonce,
                    hash_link=prefix_to_hash_link(
                        bitcoin_data.tx_type.pack(gentx)[:-32 - 4 - 4],
                        cls.gentx_before_refhash),
                    merkle_link=bitcoin_data.calculate_merkle_link(
                        [None] + other_transaction_hashes, 0),
                ))
            assert share.header == header  # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share
コード例 #10
0
ファイル: data.py プロジェクト: mmitech/p2pool-cach
    def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None):

        previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None

        def get_coinbase_fee(share_data, outpointsnum):
            # calculate neccessary coinbase fee

            # coinbase usually seems like this:
            #
            # 01000000 - nVersion
            # 1a184351 - nTimestamp

            # 01 - Inputs num
            # 0000000000000000000000000000000000000000000000000000000000000000 - Input hash
            # ffffffff - Input index (-1)
            # 0a02732a062f503253482f - Scriptsig
            # ffffffff - nSequence

            # 15 - Outpoints num
            # (User outpoints, 44 bytes per each)
            # (Donation outpoint, 76 bytes)

            # P2Pool service outpoint (contains merkle link), 46 bytes
            #
            # 1027000000000000
            # 25
            # 2417cc2063b11fd5255c7e5605780de78163ffc698ed22856bff1a5d880c3c44e400000000

            # Giving users some time to upgrade
            coinbase_size = 50 + (1 + len(share_data['coinbase'])) + outpointsnum * 44 + 76 + 46

            # if coinbase size is greater than 1000 bytes, it should pay fee (0.01 per 1000 bytes)
            if coinbase_size > 1000:
                return int(ceil(coinbase_size / 1000.0) * minout)

            return 0

        if base_subsidy is None:
            base_subsidy = net.PARENT.SUBSIDY_FUNC(block_target)

        # current user payout script

        this_script = share_data['script']

        height, last = tracker.get_height_and_last(share_data['previous_share_hash'])

        assert height >= net.REAL_CHAIN_LENGTH or last is None
        if height < net.TARGET_LOOKBEHIND:
            pre_target3 = net.MAX_TARGET
        else:
            attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
            pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
            pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
            pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
        max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
        bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//10, pre_target3)))

        if p2pool.DEBUG:
            print
            print "Share Info Bits Target (DATA)"
            print bitcoin_data.target_to_difficulty(bits.target)
            print

        new_transaction_hashes = []
        new_transaction_size = 0
        transaction_hash_refs = []
        other_transaction_hashes = []

        past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
        tx_hash_to_this = {}
        for i, share in enumerate(past_shares):
            for j, tx_hash in enumerate(share.new_transaction_hashes):
                if tx_hash not in tx_hash_to_this:
                    tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
        for tx_hash, fee in desired_other_transaction_hashes_and_fees:
            if tx_hash in tx_hash_to_this:
                this = tx_hash_to_this[tx_hash]
            else:
                if known_txs is not None:
                    this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
                    if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
                        break
                    new_transaction_size += this_size
                new_transaction_hashes.append(tx_hash)
                this = [0, len(new_transaction_hashes)-1]
            transaction_hash_refs.extend(this)
            other_transaction_hashes.append(tx_hash)

        included_transactions = set(other_transaction_hashes)

        share_data = dict(share_data, subsidy=base_subsidy)

        raw_weights, total_weight, donation_weight = tracker.get_cumulative_weights(share_data['previous_share_hash'],
            min(height, net.REAL_CHAIN_LENGTH),
            65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
        )

        # calculate "raw" subsidy
        raw_subsidy = share_data['subsidy'] - 4 * minout - get_coinbase_fee(share_data, len(raw_weights) + 1)

        # calculate "raw" amounts
        raw_amounts = dict((script, raw_subsidy*weight//total_weight) for script, weight in raw_weights.iteritems())

        total_remowed_weight = 0
        weights = {}

        # iterate list and collect all weights, which produces less than 0.01 payout
        # it's neccessary due to NVC/PPC protocol-level limitations for coinbase outpoint size
        for x in raw_amounts.keys():
            if raw_amounts[x] < minout and x not in [this_script, DONATION_SCRIPT]:
                total_remowed_weight = total_remowed_weight + raw_weights[x]
            else:
                weights[x] = raw_weights[x]

        total_weight = total_weight - total_remowed_weight
        assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)


        # base subsidy value calculated as:
        # [subsidy - (0.01 for donation + 0.01 for current user + 0.01 for p2pool outpoint) - netfee]
        my_subsidy = share_data['subsidy'] - 3 * minout - get_coinbase_fee(share_data, len(weights) + 1)

        # subsidy goes according to weights prior to this share
        amounts = dict((script, my_subsidy*weight//total_weight) for script, weight in weights.iteritems())

        # all that's left over is the donation weight and some extra satoshis due to rounding
        amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + my_subsidy - sum(amounts.itervalues())

        if sum(amounts.itervalues()) != my_subsidy or any(x < 0 for x in amounts.itervalues()):
            raise ValueError()

        # add 0.01 coin to donation, to satisfy the protocol
        amounts[DONATION_SCRIPT] = amounts[DONATION_SCRIPT] + minout

        # add 0.01 to current user output, to satisfy the protocol
        amounts[this_script] = amounts.get(this_script, 0) + minout

        dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hi

        share_info = dict(
            share_data=share_data,
            far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
            max_bits=max_bits,
            bits=bits,
            timestamp=desired_timestamp,    # need better solution
#           timestamp=math.clip(desired_timestamp, (
#               (previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
#               (previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
#           )) if previous_share is not None else desired_timestamp,
            new_transaction_hashes=new_transaction_hashes,
            transaction_hash_refs=transaction_hash_refs,
        )

        if p2pool.DEBUG:
            print
            print "Desired timestamp (DATA)"
            print desired_timestamp
            print time.time()
            print
            print "Prev Share timestamp (DATA)"
            print previous_share.timestamp
            print time.time()
            print
            print "Share info timestamp (DATA)"
            print share_info['timestamp']
            print time.time()
            print

        gentx = dict(
            version=1,
            # coinbase timestamp must be older than share/block timestamp
            # maybe there are more elegant solution, but this hack works quite well for now
            timestamp=share_info['timestamp'],
            tx_ins=[dict(
                previous_output=None,
                sequence=None,
                script=share_data['coinbase'],
            )],
            tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] + [dict(
                # add 0.01 coin to service output, to satisfy the protocol
                value=minout,
                script='\x24' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(32).pack(last_txout_nonce),
            )],
            lock_time=0,
        )

        def get_share(header, last_txout_nonce=last_txout_nonce):
            min_header = dict(header); del min_header['merkle_root']
            share = cls(net, None, dict(
                min_header=min_header,
                share_info=share_info,
                ref_merkle_link=dict(branch=[], index=0),
                last_txout_nonce=last_txout_nonce,
                hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-4-4], cls.gentx_before_refhash),
                merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
            ))
            assert share.header == header # checks merkle_root
            return share

        return share_info, gentx, other_transaction_hashes, get_share
コード例 #11
0
ファイル: run.py プロジェクト: theassyrian/p2pool.info2
def main():
    datadir = sys.argv[1]
    b = jsonrpc.HTTPProxy(sys.argv[2], dict(
        Authorization='Basic ' + base64.b64encode(
            sys.argv[3] + ':' + sys.argv[4]
        ),
    ), timeout=30)
    p2pool_base_urls = sys.argv[5:]

    @defer.inlineCallbacks
    def get(blah):
        for p2pool_base_url in util_math.shuffled(p2pool_base_urls):
            url = p2pool_base_url.rstrip('/') + '/' + blah
            print 'trying', url
            try:
                d = yield client.getPage(url)
            except Exception:
                traceback.print_exc()
            else:
                defer.returnValue(json.loads(d))
        raise ValueError('no good p2pool servers')

    # read old

    old_blocks = json.loads(_atomic_read(
        os.path.join(datadir, 'blocks'),
        '[]',
    ))
    old_stats = json.loads(_atomic_read(
        os.path.join(datadir, 'stats'),
        '{"rates": [], "maxRate": 0, "users": [], "maxUsers": 0}',
    ))


    # update
    #print stats

    web_local_stats = yield get('local_stats')
    web_global_stats = yield get('global_stats')
    web_users = yield get('users')
    web_current_payouts = yield get('current_payouts')

    difficulty = bitcoin_data.target_to_difficulty(
        bitcoin_data.average_attempts_to_target(web_local_stats['attempts_to_block']))
    users = [dict(
        Hashrate=util_math.format(int(frac * web_global_stats['pool_hash_rate'] + 1/2), add_space=True) + 'H/s',
        Address=addr,
    ) for addr, frac in sorted(web_users.iteritems(), key=lambda (k, v): -v)]

    payouts = [dict(
        Address=addr,
        Payment=amt,
    ) for addr, amt in sorted(web_current_payouts.iteritems(), key=lambda (k, v): -v)]

    def update_timeseries(x, value, now_time):
        lastlast_time = x[-2][0]
        last_time = x[-1][0]
        next_time = last_time + (last_time - lastlast_time)
        
        if abs(now_time - last_time) < abs(now_time - next_time):
            # update last
            old_value = x[-1][1]
            old_weight = x[-1][2] if len(x[-1]) >= 3 else 1e9
            return x[:-1] + [
                [last_time, (old_value * old_weight + value)/(old_weight + 1), old_weight + 1]
            ]
        else:
            # start next
            return x + [
                [next_time, value, 1]
            ]

    stats = dict(
        rates=update_timeseries(old_stats['rates'], web_global_stats['pool_hash_rate']/1e9, time.time()*1e3),
        maxRate=max(old_stats['maxRate'], web_global_stats['pool_hash_rate']/1e9),
        users=update_timeseries(old_stats['users'], len(web_users), time.time()*1e3),
        maxUsers=max(old_stats['maxUsers'], len(web_users)),
    )

    blocks = list(old_blocks)
    blocks_dict = dict((block['Id'], block) for block in blocks)
    assert len(blocks_dict) == len(blocks)
    def handle_block(block_data):
        block = block_data['block']
        
        txouts = block['txs'][0]['tx_outs']
        
        if len(txouts) < 25: return
        if not txouts[-1]['script'].startswith('\x6a'): return
        if len(txouts[-1]['script']) < 33: return
        if txouts[-1]['value'] != 0: return
        if txouts[-2]['script'] != p2pool_data.DONATION_SCRIPT: return
        
        hash_str = '%064x' % bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        print hash_str
        
        if hash_str not in blocks_dict:
            print 'inserted'
            x = dict(
                Id=hash_str,
                PrevBlock='%064x' % block['header']['previous_block'],
                GenerationTxHash='%064x' % block_data['gentx_hash'],
                BlockHeight=block_data['height'],
                Difficulty=bitcoin_data.target_to_difficulty(block['header']['bits'].target),
                Timestamp=block['header']['timestamp'],
                IsOrphaned=None, # XXX
            )
            blocks.append(x)
            blocks_dict[hash_str] = x
    yield get_blocks(b, 400, handle_block)
    blocks.sort(key=lambda x: -x['Timestamp'])

    # write

    _atomic_write(os.path.join(datadir, 'blocks_5'), json.dumps(blocks[:5]))
    _atomic_write(os.path.join(datadir, 'blocks_100'), json.dumps(blocks[:100]))
    _atomic_write(os.path.join(datadir, 'blocks'), json.dumps(blocks))
    _atomic_write(os.path.join(datadir, 'difficulty'), json.dumps(difficulty))
    #_atomic_write(os.path.join(datadir, 'donations'), json.dumps(donations))
    _atomic_write(os.path.join(datadir, 'payouts'), json.dumps(payouts))
    _atomic_write(os.path.join(datadir, 'stats'), json.dumps(stats))
    _atomic_write(os.path.join(datadir, 'users'), json.dumps(users))
コード例 #12
0
    def think(self, block_rel_height_func, previous_block, bits):
        """Verify pending shares, prune bad/stale chains, and pick the best head.

        Returns (best, desired): `best` is the hash of the share head to build on
        (or None if no verified head exists), and `desired` is a list of
        (peer, share_hash) pairs worth requesting from the network, filtered by
        the timestamp cutoff computed below.
        """
        desired = set()

        # O(len(self.heads))
        #   make 'unverified heads' set?
        # for each overall head, attempt verification
        # if it fails, attempt on parent, and repeat
        # if no successful verification because of lack of parents, request parent
        bads = set()
        for head in set(self.heads) - set(self.verified.heads):
            head_height, last = self.get_height_and_last(head)

            # Walk the whole chain if it has no tail (last is None); otherwise only a
            # short prefix past CHAIN_LENGTH, stopping at the first verifiable share.
            for share in self.get_chain(
                    head, head_height if last is None else min(
                        5, max(0, head_height - self.net.CHAIN_LENGTH))):
                if self.attempt_verify(share):
                    break
                if share.hash in self.heads:
                    bads.add(share.hash)
            else:
                # for/else: nothing in the prefix verified -- ask a peer that gave us a
                # child of `last` for the missing parent (with recency/target hints).
                if last is not None:
                    desired.add((
                        self.items[random.choice(list(
                            self.reverse[last]))].peer,
                        last,
                        max(x.timestamp for x in self.get_chain(
                            head, min(head_height, 5))),
                        min(x.target for x in self.get_chain(
                            head, min(head_height, 5))),
                    ))
        for bad in bads:
            assert bad not in self.verified.items
            assert bad in self.heads
            if p2pool.DEBUG:
                print "BAD", bad
            self.remove(bad)

        # try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
        for head in list(self.verified.heads):
            head_height, last_hash = self.verified.get_height_and_last(head)
            last_height, last_last_hash = self.get_height_and_last(last_hash)
            # XXX review boundary conditions
            want = max(self.net.CHAIN_LENGTH - head_height, 0)
            can = max(last_height - 1 - self.net.CHAIN_LENGTH,
                      0) if last_last_hash is not None else last_height
            get = min(want, can)
            #print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
            for share in self.get_chain(last_hash, get):
                if not self.attempt_verify(share):
                    break
            if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
                desired.add((
                    self.items[random.choice(
                        list(self.verified.reverse[last_hash]))].peer,
                    last_last_hash,
                    max(x.timestamp
                        for x in self.get_chain(head, min(head_height, 5))),
                    min(x.target
                        for x in self.get_chain(head, min(head_height, 5))),
                ))

        # decide best tree
        # Each verified tail is scored via self.score() on its highest-work head.
        decorated_tails = sorted((self.score(
            max(self.verified.tails[tail_hash], key=self.verified.get_work),
            block_rel_height_func), tail_hash)
                                 for tail_hash in self.verified.tails)
        if p2pool.DEBUG:
            print len(decorated_tails), 'tails:'
            for score, tail_hash in decorated_tails:
                print format_hash(tail_hash), score
        best_tail_score, best_tail = decorated_tails[
            -1] if decorated_tails else (None, None)

        # decide best verified head
        # Rank heads of the best tail by: work 5 shares back, whether the head is a
        # block solution, whether it matches the current (previous_block, bits) or
        # is locally generated, and (last) how recently it was seen.
        decorated_heads = sorted((
            (
                self.verified.get_work(
                    self.verified.get_nth_parent_hash(
                        h, min(5, self.verified.get_height(h)))),
                #self.items[h].peer is None,
                self.items[h].pow_hash <=
                self.items[h].header['bits'].target,  # is block solution
                (self.items[h].header['previous_block'],
                 self.items[h].header['bits']) == (
                     previous_block, bits) or self.items[h].peer is None,
                -self.items[h].time_seen,
            ),
            h) for h in self.verified.tails.get(best_tail, []))
        if p2pool.DEBUG:
            print len(decorated_heads), 'heads. Top 10:'
            for score, head_hash in decorated_heads[-10:]:
                print '   ', format_hash(head_hash), format_hash(
                    self.items[head_hash].previous_hash), score
        best_head_score, best = decorated_heads[-1] if decorated_heads else (
            None, None)

        # eat away at heads
        # Repeatedly (bounded at 1000 passes) drop heads that are not in the top 5,
        # were seen more than 5 minutes ago, and whose subtree has been quiet.
        if decorated_heads:
            for i in xrange(1000):
                to_remove = set()
                for share_hash, tail in self.heads.iteritems():
                    if share_hash in [
                            head_hash
                            for score, head_hash in decorated_heads[-5:]
                    ]:
                        #print 1
                        continue
                    if self.items[share_hash].time_seen > time.time() - 300:
                        #print 2
                        continue
                    if share_hash not in self.verified.items and max(
                            self.items[after_tail_hash].time_seen
                            for after_tail_hash in self.reverse.get(
                                tail)) > time.time() - 120:  # XXX stupid
                        #print 3
                        continue
                    to_remove.add(share_hash)
                if not to_remove:
                    break
                for share_hash in to_remove:
                    if share_hash in self.verified.items:
                        self.verified.remove(share_hash)
                    self.remove(share_hash)
                #print "_________", to_remove

        # drop tails
        # Trim the oldest shares of tails whose every head is already more than
        # 2*CHAIN_LENGTH+10 high; removal must mirror into self.verified.
        for i in xrange(1000):
            to_remove = set()
            for tail, heads in self.tails.iteritems():
                if min(self.get_height(head)
                       for head in heads) < 2 * self.net.CHAIN_LENGTH + 10:
                    continue
                for aftertail in self.reverse.get(tail, set()):
                    if len(self.reverse[
                            self.items[aftertail].previous_hash]) > 1:  # XXX
                        print "raw"
                        continue
                    to_remove.add(aftertail)
            if not to_remove:
                break
            # if removed from this, it must be removed from verified
            #start = time.time()
            for aftertail in to_remove:
                if self.items[aftertail].previous_hash not in self.tails:
                    print "erk", aftertail, self.items[aftertail].previous_hash
                    continue
                if aftertail in self.verified.items:
                    self.verified.remove(aftertail)
                self.remove(aftertail)
            #end = time.time()
            #print "removed! %i %f" % (len(to_remove), (end - start)/len(to_remove))

        if best is not None:
            best_share = self.items[best]
            # Stale check: if the best share was built on a different block than the
            # one we currently know about (and isn't itself that block, and came from
            # a peer), fall back to its parent.
            if (
                    best_share.header['previous_block'],
                    best_share.header['bits']
            ) != (
                    previous_block, bits
            ) and best_share.header_hash != previous_block and best_share.peer is not None:
                if p2pool.DEBUG:
                    print 'Stale detected! %x < %x' % (
                        best_share.header['previous_block'], previous_block)
                best = best_share.previous_hash

            # Only request shares newer than an hour before the best share, and with
            # a target no easier than ~2x the chain's expected per-share target.
            timestamp_cutoff = min(int(time.time()),
                                   best_share.timestamp) - 3600
            target_cutoff = 2**256 // (
                self.net.SHARE_PERIOD * best_tail_score[1] +
                1) * 2 if best_tail_score[1] is not None else 2**256 - 1
        else:
            # No verified head: accept anything from the last day, any target.
            timestamp_cutoff = int(time.time()) - 24 * 60 * 60
            target_cutoff = 2**256 - 1

        if p2pool.DEBUG:
            print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (
                len(desired), math.format_dt(time.time() - timestamp_cutoff),
                bitcoin_data.target_to_difficulty(target_cutoff))
            for peer, hash, ts, targ in desired:
                print '   ', '%s:%i' % peer.addr if peer is not None else None, format_hash(
                    hash), math.format_dt(time.time(
                    ) - ts), bitcoin_data.target_to_difficulty(
                        targ), ts >= timestamp_cutoff, targ <= target_cutoff

        # NOTE(review): target_cutoff is computed but only ts filters the returned
        # list here -- the target filter appears to be applied elsewhere; confirm.
        return best, [(peer, hash) for peer, hash, ts, targ in desired
                      if ts >= timestamp_cutoff]