Code Example #1
File: web.py Project: acejam/p2pool-sucr
def get_web_root(wb, datadir_path, sucrd_getinfo_var, stop_event=variable.Event(), static_dir=None):
    node = wb.node
    start_time = time.time()
    
    web_root = resource.Resource()
    
    def get_users():
        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[sucr_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
        return res
    
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (sucr_data.script2_to_address(script, node.net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if sucr_data.script2_to_address(script, node.net.PARENT) is not None
        ))
    
    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        diff = sucr_data.target_to_difficulty(wb.current_work.value['bits'].target)
        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=sucr_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
            network_block_difficulty=diff,
            network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD),
        )
    
    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        
        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(sucr_data.target_to_average_attempts(share.target)
            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()

        miner_last_difficulties = {}
        for addr in wb.last_work_shares.value:
            miner_last_difficulties[addr] = sucr_data.target_to_difficulty(wb.last_work_shares.value[addr].target)

        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            miner_last_difficulties=miner_last_difficulties,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=sucr_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=sucr_data.target_to_average_attempts(node.sucrd_work.value['bits'].target),
            block_value=node.sucrd_work.value['subsidy']*1e-8,
            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, sucrd_getinfo_var.value, node.sucrd_work.value),
            donation_proportion=wb.donation_percentage/100,
            version=p2pool.__version__,
            protocol_version=p2p.Protocol.VERSION,
            fee=wb.worker_fee,
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild('difficulty', WebInterface(lambda: sucr_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('user_stales', WebInterface(lambda: dict((sucr_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((sucr_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(node.p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: sucr_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
    web_root.putChild('payout_addrs', WebInterface(lambda: list(('%s' % sucr_data.pubkey_hash_to_address(add, node.net.PARENT)) for add in wb.pubkeys.keys)))
    def height_from_coinbase(coinbase):
        # BIP 34: the block height is the first push in the coinbase script
        opcode = ord(coinbase[0]) if len(coinbase) > 0 else 0
        if opcode >= 1 and opcode <= 75: # direct push of 1-75 bytes
            return pack.IntType(opcode*8).unpack(coinbase[1:opcode+1])
        if opcode == 76: # OP_PUSHDATA1
            return pack.IntType(8).unpack(coinbase[1:2])
        if opcode == 77: # OP_PUSHDATA2 -- 2-byte payload, so a 16-bit int
            return pack.IntType(16).unpack(coinbase[1:3])
        if opcode == 78: # OP_PUSHDATA4 -- 4-byte payload, so a 32-bit int
            return pack.IntType(32).unpack(coinbase[1:5])
        if opcode >= 79 and opcode <= 96: # OP_1NEGATE, OP_1 .. OP_16
            return opcode - 80
        return None
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
        ts=s.timestamp,
        hash='%064x' % s.header_hash,
        number=height_from_coinbase(s.share_data['coinbase']),
        share='%064x' % s.hash,
    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
    
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        my_current_payout=0.0
        for add in wb.pubkeys.keys:
            my_current_payout+=node.get_current_txouts().get(sucr_data.pubkey_hash_to_script2(add), 0)*1e-8
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=my_current_payout,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=sucr_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=sucr_data.target_to_average_attempts(node.sucrd_work.value['bits'].target),
            block_value=node.sucrd_work.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    x = deferral.RobustLoopingCall(update_stat_log)
    x.start(5*60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            far_parent='%064x' % share.share_info['far_share_hash'],
            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=sucr_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
                absheight=share.absheight,
                abswork=share.abswork,
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                    last_txout_nonce='%016x' % share.contents['last_txout_nonce'],
                ),
                other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)],
            ),
        )

    def get_share_address(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        return sucr_data.script2_to_address(share.new_script, node.net.PARENT)

    new_root.putChild('payout_address', WebInterface(lambda share_hash_str: get_share_address(share_hash_str)))
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
    new_root.putChild('my_share_hashes', WebInterface(lambda: ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes]))
    def get_share_data(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share())
    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
    new_root.putChild('currency_info', WebInterface(lambda: dict(
        symbol=node.net.PARENT.SYMBOL,
        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
        tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
    )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False,
            multivalues=True, multivalue_undefined_means_0=True,
            default_func=graph.make_multivalue_migrator(dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'),
                post_func=lambda bins: [dict((k, (v[0] - (sum(bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins])),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming='incoming_peers', outgoing='outgoing_peers'))),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
    }, hd_obj)
    x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @wb.share_received.watch
    def _(work, dead, share_hash):
        t = time.time()
        if not dead:
            hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=work))
        else:
            hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=work))
        def later():
            res = node.tracker.is_child_of(share_hash, node.best_share_var.value)
            if res is None: res = False # share isn't connected to sharechain? assume orphaned
            if res and dead: # share was DOA, but is now in sharechain
                # move from dead to good
                hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=-work, good=work))
            elif not res and not dead: # share wasn't DOA, and isn't in sharechain
                # move from good to orphan
                hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=-work, orphan=work))
        reactor.callLater(200, later)
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
    def add_point():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
        t = time.time()
        
        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
        
        current_txouts = node.get_current_txouts()
        my_current_payouts = 0.0
        for add in wb.pubkeys.keys:
             my_current_payouts += current_txouts.get(sucr_data.pubkey_hash_to_script2(add), 0)*1e-8
        hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        current_txouts_by_address = dict((sucr_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        
        hd.datastreams['peers'].add_datum(t, dict(
            incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
            outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
        ))
        
        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
    x = deferral.RobustLoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)
    @node.sucrd_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    if static_dir is None:
        static_dir = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static')
    web_root.putChild('static', static.File(static_dir))
    
    return web_root
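
A minimal usage sketch for get_web_root (an assumption, not part of the project: the port number and the pre-built wb, datadir_path, and sucrd_getinfo_var objects are placeholders here). It serves the returned resource tree over HTTP with Twisted, the same way Code Example #3 exposes its worker interface:

from twisted.internet import reactor
from twisted.web import server

# wb, datadir_path, and sucrd_getinfo_var are assumed to be constructed
# elsewhere by the node startup code; 9332 is an arbitrary example port.
web_root = get_web_root(wb, datadir_path, sucrd_getinfo_var)
reactor.listenTCP(9332, server.Site(web_root))
# e.g. GET http://127.0.0.1:9332/local_stats returns get_local_stats() as JSON
reactor.run()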
Code Example #2
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls,
                 worker_fee, args, pubkeys, dashd):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.dashd = dashd
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})

        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(dict(
                forest.AttributeDelta.attrs,
                my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
                my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
                my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes
                    and share.share_data['stale_info'] == 'orphan' else 0,
                my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes
                    and share.share_data['stale_info'] == 'doa' else 0,
            )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(
                merged_url,
                dict(Authorization='Basic ' +
                     base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock on %s:' %
                    (merged_url, ), 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(math.merge_dicts(
                    self.merged_work.value,
                    {auxblock['chainid']: dict(
                        hash=int(auxblock['hash'], 16),
                        target='p2pool' if auxblock['target'] == 'p2pool'
                            else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                        merged_proxy=merged_proxy,
                    )},
                ))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.node.dashd_work.value
            bb = self.node.best_block_header.value
            if (bb is not None and bb['previous_block'] == t['previous_block']
                    and self.node.net.PARENT.POW_FUNC(
                        dash_data.block_header_type.pack(bb)) <= t['bits'].target):
                print 'Skipping from block %x to block %x! NewHeight=%s' % (
                    bb['previous_block'],
                    self.node.net.PARENT.BLOCKHASH_FUNC(
                        dash_data.block_header_type.pack(bb)),
                    t['height'] + 1,
                )
                t = dict(
                    version=bb['version'],
                    previous_block=self.node.net.PARENT.BLOCKHASH_FUNC(
                        dash_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600,  # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=dash_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.dashd_work.value['subsidy'],
                    last_update=self.node.dashd_work.value['last_update'],
                    payment_amount=self.node.dashd_work.value['payment_amount'],
                    packed_payments=self.node.dashd_work.value['packed_payments'],
                )

            self.current_work.set(t)

        self.node.dashd_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
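
The new_work_event wired up above is the long-polling trigger: it fires when version, previous_block, or bits changes, when transactions first appear, when merged work changes, or when the best share changes. A hedged sketch of a consumer (bridge and push_new_work are illustrative names, not identifiers from this project):

# bridge is assumed to be an instance of the worker-bridge class above.
def push_new_work():
    work = bridge.current_work.value
    print 'pushing new work, previous block %064x' % (work['previous_block'],)

bridge.new_work_event.watch(push_new_work)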
Code Example #3
@defer.inlineCallbacks
def main(args, net, datadir_path):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__, )
        print
        try:
            from . import draw
        except ImportError:
            draw = None
            print "Install Pygame and PIL to enable visualizations! Visualizations disabled."
            print

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (
            url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(
            url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        good = yield deferral.retry('Error while checking bitcoind identity:',
                                    1)(net.BITCOIN_RPC_CHECK)(bitcoind)
        if not good:
            print "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
            return
        temp_work = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x' % (
            temp_work['previous_block_hash'], )
        print

        # connect to bitcoind over bitcoin-p2p
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (
            args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin_p2p.ClientFactory(net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port,
                           factory)
        yield factory.getProtocol()  # waits until handshake is successful
        print '    ...success!'
        print

        if args.pubkey_hash is None:
            print 'Getting payout address from bitcoind...'
            my_script = yield get_payout_script2(bitcoind, net)
        else:
            print 'Computing payout script from provided address....'
            my_script = bitcoin_data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', bitcoin_data.script2_to_human(
            my_script, net)
        print

        ht = bitcoin_p2p.HeightTracker(bitcoind, factory)

        tracker = p2pool_data.OkayTracker(net)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.shares:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.shares) % 1000 == 0 and tracker.shares:
                    print "    %i" % (len(tracker.shares), )
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (
            len(known_verified), )
        for h in known_verified:
            if h not in tracker.shares:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.shares[h])
        print "    ...done loading %i shares!" % (len(tracker.shares), )
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(
            lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(
            lambda share: shared_share_hashes.discard(share.hash))

        peer_heads = expiring_dict.ExpiringDict(
            300)  # hash -> peers that know of it

        pre_current_work = variable.Variable(None)
        pre_current_work2 = variable.Variable(None)
        pre_merged_work = variable.Variable(None)
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)

        work_updated = variable.Event()

        requested = expiring_dict.ExpiringDict(300)

        @defer.inlineCallbacks
        def set_real_work1():
            work = yield getwork(bitcoind)
            pre_current_work2.set(
                dict(
                    time=work['time'],
                    transactions=work['transactions'],
                    subsidy=work['subsidy'],
                    clock_offset=time.time() - work['time'],
                    last_update=time.time(),
                ))  # second set first because everything hooks on the first
            pre_current_work.set(
                dict(
                    version=work['version'],
                    previous_block=work['previous_block_hash'],
                    target=work['target'],
                ))

        def set_real_work2():
            best, desired = tracker.think(
                ht, pre_current_work.value['previous_block'],
                time.time() - pre_current_work2.value['clock_offset'])

            current_work2.set(pre_current_work2.value)
            t = dict(pre_current_work.value)
            t['best_share_hash'] = best
            t['aux_work'] = pre_merged_work.value
            current_work.set(t)

            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails:  # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [
                    peer for peer in potential_peers if peer.connected2
                ]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = (random.choice(potential_peers)
                            if potential_peers and random.random() > .2 else peer2)
                    if peer is None:
                        continue

                print 'Requesting parent share %s from %s' % (
                    p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10))
                        for head in tracker.heads))[:100],
                )
                requested[share_hash] = t, count + 1

        pre_current_work.changed.watch(lambda _: set_real_work2())

        print 'Initializing work...'
        yield set_real_work1()
        print '    ...success!'
        print

        pre_merged_work.changed.watch(lambda _: set_real_work2())
        ht.updated.watch(set_real_work2)

        @defer.inlineCallbacks
        def set_merged_work():
            if not args.merged_url:
                return
            merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass, ))
            while True:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock:',
                    1)(merged.rpc_getauxblock)()
                pre_merged_work.set(
                    dict(
                        hash=int(auxblock['hash'], 16),
                        target=bitcoin_data.HashType().unpack(
                            auxblock['target'].decode('hex')),
                        chain_id=auxblock['chainid'],
                    ))
                yield deferral.sleep(1)

        set_merged_work()

        start_time = time.time() - current_work2.value['clock_offset']

        # setup p2p logic and join p2pool network

        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares), )

            new_count = 0
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                    continue

                new_count += 1

                #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)

                tracker.add(share)

            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)

            if new_count:
                set_real_work2()

            if len(shares) > 5:
                print '... done processing %i shares. New: %i Have: %i/~%i' % (
                    len(shares), new_count, len(
                        tracker.shares), 2 * net.CHAIN_LENGTH)

        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['target']:
                if factory.conn.value is not None:
                    factory.conn.value.send_block(
                        block=share.as_block(tracker))
                else:
                    print 'No bitcoind connection! Erp!'
                print
                print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (
                    p2pool_data.format_hash(share.hash),
                    share.header_hash,
                )
                print

        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (
                    p2pool_data.format_hash(share_hash), )
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1

            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])

        def p2p_get_shares(share_hashes, parents, stops, peer):
            parents = min(parents, 1000 // len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in tracker.get_chain(
                        share_hash,
                        min(parents + 1, tracker.get_height(share_hash))):
                    if share.hash in stops:
                        break
                    shares.append(share)
            print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0],
                                                  peer.addr[1])
            peer.sendShares(shares)

        print 'Joining p2pool network using port %i...' % (args.p2pool_port, )

        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, net.P2P_PORT

        nodes = set([
            ('72.14.191.28', net.P2P_PORT),
            ('62.204.197.159', net.P2P_PORT),
            ('142.58.248.28', net.P2P_PORT),
            ('94.23.34.145', net.P2P_PORT),
        ])
        for host in [
                'p2pool.forre.st',
                'dabuttonfactory.com',
        ] + (['liteco.in'] if net.NAME == 'litecoin' else []) + []:
            try:
                nodes.add(((yield reactor.resolve(host)), net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')

        addrs = {}
        try:
            addrs = dict(
                eval(x) for x in open(os.path.join(datadir_path, 'addrs.txt')))
        except:
            print "error reading addrs"

        def save_addrs():
            open(os.path.join(datadir_path, 'addrs.txt'),
                 'w').writelines(repr(x) + '\n' for x in addrs.iteritems())

        task.LoopingCall(save_addrs).start(60)

        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares

        p2p_node.start()

        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            shares = []
            for share in tracker.get_chain(
                    new_work['best_share_hash'],
                    tracker.get_height(new_work['best_share_hash'])):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)

            for peer in p2p_node.peers.itervalues():
                peer.sendShares(
                    [share for share in shares if share.peer is not peer])

        current_work.changed.watch(work_changed)

        def save_shares():
            for share in tracker.get_chain(
                    current_work.value['best_share_hash'],
                    min(
                        tracker.get_height(
                            current_work.value['best_share_hash']),
                        2 * net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.shares:
                    ss.add_verified_hash(share.hash)

        task.LoopingCall(save_shares).start(60)

        print '    ...success!'
        print

        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if is_lan:
                        pm = yield portmapper.get_port_mapper()
                        yield pm._upnp.add_port_mapping(
                            lan_ip, args.p2pool_port, args.p2pool_port,
                            'p2pool',
                            'TCP')  # XXX try to forward external correct port?
                except defer.TimeoutError:
                    pass
                except:
                    if p2pool.DEBUG:
                        log.err(None, "UPnP error:")
                yield deferral.sleep(random.expovariate(1 / 120))

        if args.upnp:
            upnp_thread()

        # start listening for workers with a JSON-RPC server

        print 'Listening for workers on port %i...' % (args.worker_port, )

        # setup worker logic

        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<I', random.randrange(2**32))

        share_counter = skiplists.CountsSkipList(tracker, run_identifier)
        removed_unstales = set()

        def get_share_counts(doa=False):
            height, last = tracker.get_height_and_last(
                current_work.value['best_share_hash'])
            matching_in_chain = share_counter(
                current_work.value['best_share_hash'],
                height) | removed_unstales
            shares_in_chain = my_shares & matching_in_chain
            stale_shares = my_shares - matching_in_chain
            if doa:
                stale_doa_shares = stale_shares & doa_shares
                stale_not_doa_shares = stale_shares - stale_doa_shares
                return (len(shares_in_chain) + len(stale_shares),
                        len(stale_doa_shares), len(stale_not_doa_shares))
            return len(shares_in_chain) + len(stale_shares), len(stale_shares)

        @tracker.verified.removed.watch
        def _(share):
            if share.hash in my_shares and tracker.is_child_of(
                    share.hash, current_work.value['best_share_hash']):
                removed_unstales.add(share.hash)

        def get_payout_script_from_username(user):
            if user is None:
                return None
            try:
                return bitcoin_data.pubkey_hash_to_script2(
                    bitcoin_data.address_to_pubkey_hash(user, net))
            except:  # XXX blah
                return None

        def compute(request):
            state = current_work.value
            user = worker_interface.get_username(request)

            payout_script = get_payout_script_from_username(user)
            if payout_script is None or random.uniform(0, 100) < args.worker_fee:
                payout_script = my_script

            if len(p2p_node.peers) == 0 and net.PERSIST:
                raise jsonrpc.Error(-12345,
                                    u'p2pool is not connected to any peers')
            if state['best_share_hash'] is None and net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            if time.time() > current_work2.value['last_update'] + 60:
                raise jsonrpc.Error(-12345, u'lost contact with bitcoind')

            previous_share = (None if state['best_share_hash'] is None
                              else tracker.shares[state['best_share_hash']])
            subsidy = current_work2.value['subsidy']
            share_info, generate_tx = p2pool_data.generate_transaction(
                tracker=tracker,
                share_data=dict(
                    previous_share_hash=state['best_share_hash'],
                    coinbase='' if state['aux_work'] is None else
                    '\xfa\xbemm' + bitcoin_data.HashType().pack(
                        state['aux_work']['hash'])[::-1] +
                    struct.pack('<ii', 1, 0),
                    nonce=run_identifier +
                    struct.pack('<Q', random.randrange(2**64)),
                    new_script=payout_script,
                    subsidy=subsidy,
                    donation=math.perfect_round(
                        65535 * args.donation_percentage / 100),
                    stale_frac=(lambda shares, stales: 255 if shares == 0 else
                                math.perfect_round(254 * stales / shares))(
                                    *get_share_counts()),
                ),
                block_target=state['target'],
                desired_timestamp=int(time.time() -
                                      current_work2.value['clock_offset']),
                net=net,
            )

            print 'New work for worker %s! Difficulty: %.06f Payout if block: %.6f %s Total block value: %.6f %s including %i transactions' % (
                user,
                bitcoin_data.target_to_difficulty(share_info['target']),
                (sum(t['value'] for t in generate_tx['tx_outs']
                     if t['script'] == payout_script) - subsidy // 200) * 1e-8,
                net.BITCOIN_SYMBOL,
                subsidy * 1e-8,
                net.BITCOIN_SYMBOL,
                len(current_work2.value['transactions']),
            )

            transactions = [generate_tx] + list(
                current_work2.value['transactions'])
            merkle_root = bitcoin_data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = share_info, transactions, time.time()

            return bitcoin_getwork.BlockAttempt(
                state['version'], state['previous_block'], merkle_root,
                current_work2.value['time'], state['target'],
                share_info['target']), state['best_share_hash']

        my_shares = set()
        doa_shares = set()

        def got_response(header, request):
            try:
                user = worker_interface.get_username(request)
                # match up with transactions
                xxx = merkle_root_to_transactions.get(header['merkle_root'],
                                                      None)
                if xxx is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                share_info, transactions, getwork_time = xxx

                hash_ = bitcoin_data.block_header_type.hash256(header)

                pow_hash = net.BITCOIN_POW_FUNC(header)

                if pow_hash <= header['target'] or p2pool.DEBUG:
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(
                            block=dict(header=header, txs=transactions))
                    else:
                        print 'No bitcoind connection! Erp!'
                    if pow_hash <= header['target']:
                        print
                        print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (
                            hash_, )
                        print

                if (current_work.value['aux_work'] is not None
                        and pow_hash <= current_work.value['aux_work']['target']):
                    try:
                        aux_pow = dict(
                            merkle_tx=dict(
                                tx=transactions[0],
                                block_hash=hash_,
                                merkle_branch=[x['hash'] for x in
                                    p2pool_data.calculate_merkle_branch(transactions, 0)],
                                index=0,
                            ),
                            merkle_branch=[],
                            index=0,
                            parent_block_header=header,
                        )

                        a = transactions[0]['tx_ins'][0]['script'][-32 - 8:-8].encode('hex')
                        b = bitcoin_data.aux_pow_type.pack(aux_pow).encode('hex')
                        #print a, b
                        merged = jsonrpc.Proxy(args.merged_url,
                                               (args.merged_userpass, ))

                        def _(res):
                            print "MERGED RESULT:", res

                        merged.rpc_getauxblock(a, b).addBoth(_)
                    except:
                        log.err(None,
                                'Error while processing merged mining POW:')

                target = share_info['target']
                if pow_hash > target:
                    print 'Worker submitted share with hash > target:\nhash  : %x\ntarget: %x' % (
                        pow_hash, target)
                    return False
                share = p2pool_data.Share(net,
                                          header,
                                          share_info,
                                          other_txs=transactions[1:])
                my_shares.add(share.hash)
                if share.previous_hash != current_work.value['best_share_hash']:
                    doa_shares.add(share.hash)
                print 'GOT SHARE! %s %s prev %s age %.2fs' % (
                    user, p2pool_data.format_hash(share.hash),
                    p2pool_data.format_hash(share.previous_hash),
                    time.time() - getwork_time,
                ) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False

        web_root = worker_interface.WorkerInterface(compute, got_response,
                                                    current_work.changed)

        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
                return json.dumps(int(att_s / (1. - (math.median(fracs) if fracs else 0))))
            return json.dumps(None)

        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535 * 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[bitcoin_data.script2_to_human(script, net)] = weights[script] / total_weight
            return json.dumps(res)

        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                resource.Resource.__init__(self)  # sets up the children dict that twisted's resource traversal relies on
                self.func, self.mime_type = func, mime_type

            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()

        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        web_root.putChild('fee', WebInterface(lambda: json.dumps(args.worker_fee), 'application/json'))
        if draw is not None:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))

        reactor.listenTCP(args.worker_port, server.Site(web_root))

        print '    ...success!'
        print

        # done!

        # do new getwork when a block is heard on the p2p interface

        def new_block(block_hash):
            work_updated.happened()

        factory.new_block.watch(new_block)

        print 'Started successfully!'
        print

        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList(
                    [flag, deferral.sleep(random.uniform(1, 10))],
                    fireOnOneCallback=True)

        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1 / 20.))  # float literal so this works even without the project's __future__ division import

        work1_thread()
        work2_thread()

        if hasattr(signal, 'SIGALRM'):

            def watchdog_handler(signum, frame):
                print 'Watchdog timer went off at:'
                traceback.print_stack()

            signal.signal(signal.SIGALRM, watchdog_handler)
            task.LoopingCall(signal.alarm, 30).start(1)

        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    if time.time() > current_work2.value['last_update'] + 60:
                        print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
                    if current_work.value['best_share_hash'] is not None:
                        height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                        if height > 2:
                            att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                            weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535 * 2**256)
                            shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                            stale_shares = stale_doa_shares + stale_not_doa_shares
                            fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
                            this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                                math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                                height,
                                len(tracker.verified.shares),
                                len(tracker.shares),
                                weights.get(my_script, 0) / total_weight * 100,
                                math.format(int(weights.get(my_script, 0) * att_s // total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                                shares,
                                stale_not_doa_shares,
                                stale_doa_shares,
                                len(p2p_node.peers),
                            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                            if fracs:
                                med = math.median(fracs)
                                this_str += '\nPool stales: %i%%' % (int(100 * med + .5),)
                                conf = 0.95
                                if shares:
                                    this_str += u' Own: %i±%i%%' % tuple(
                                        int(100 * x + .5) for x in math.interval_to_center_radius(
                                            math.binomial_conf_interval(stale_shares, shares, conf)))
                                    if med < .99:
                                        this_str += u' Own efficiency: %i±%i%%' % tuple(
                                            int(100 * x + .5) for x in math.interval_to_center_radius(
                                                (1 - y) / (1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
                            if this_str != last_str or time.time() > last_time + 15:
                                print this_str
                                last_str = this_str
                                last_time = time.time()
                except:
                    log.err()

        status_thread()
    except:
        log.err(None, 'Fatal error:')
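
The share and block checks in got_response above both reduce to a single integer comparison: the proof-of-work hash, read as a 256-bit little-endian integer, must be at or below the relevant target. A minimal standalone sketch of that rule, using plain hashlib instead of p2pool's bitcoin_data helpers (double_sha256_int and share_meets_target are illustrative names, not p2pool functions):

import hashlib

def double_sha256_int(data):
    # Bitcoin-style hashes are double SHA-256, interpreted as little-endian integers.
    digest = hashlib.sha256(hashlib.sha256(data).digest()).digest()
    return int(digest[::-1].encode('hex'), 16)  # Python 2 str API, matching the examples

def share_meets_target(header_bytes, target):
    # Valid when the hash value is numerically <= the target; a smaller target
    # means higher difficulty. Share and block targets differ only in magnitude,
    # which is why one header can satisfy the share target, the block target, or both.
    return double_sha256_int(header_bytes) <= target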
Code example #4
def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
    node = wb.node
    start_time = time.time()
    
    web_root = resource.Resource()
    
    def get_users():
        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
        return res
    
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
        ))
    
    def get_local_rates():
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = wb.local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        return miner_hash_rates, miner_dead_hash_rates
    
    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
        )
    
    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        
        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        
        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage/100,
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in
        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in node.p2p_node.peers.itervalues()]))
    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(node.p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
        ts=s.timestamp,
        hash='%064x' % s.header_hash,
        number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
        share='%064x' % s.hash,
    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
    
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-8,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    x = task.LoopingCall(update_stat_log)
    x.start(5*60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-8,
                ),
                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
            ),
        )
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
    def get_share_data(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share1a())
    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
    new_root.putChild('currency_info', WebInterface(lambda: dict(
        symbol=node.net.PARENT.SYMBOL,
        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
    )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
        if not obj:
            last_bin_end = 0
            bins = dv_desc.bin_count*[{}]
        else:
            pool_rates = obj['pool_rates'][dv_name]
            desired_versions = obj['desired_versions'][dv_name]
            def get_total_pool_rate(t):
                n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
                if n < 0 or n >= dv_desc.bin_count:
                    return None
                total = sum(x[0] for x in pool_rates['bins'][n].values())
                count = math.mean(x[1] for x in pool_rates['bins'][n].values())
                if count == 0:
                    return None
                return total/count
            last_bin_end = desired_versions['last_bin_end']
            bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
        return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True),
        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True,
            multivalue_undefined_means_0=True, default_func=build_desired_rates),
        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
    }, hd_obj)
    x = task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @wb.share_received.watch
    def _(work, dead):
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
    def add_point():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
        t = time.time()
        
        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
        
        current_txouts = node.get_current_txouts()
        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8)
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address))
        
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
        
        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
    x = task.LoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)
    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root
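
Since get_web_root returns an ordinary Twisted resource tree, serving it takes one listenTCP call, mirroring the server.Site pattern used in the earlier example. A minimal sketch, assuming wb, datadir_path and bitcoind_warning_var come from the surrounding node setup and that port 9332 is an arbitrary choice:

from twisted.internet import reactor
from twisted.web import server

root = get_web_root(wb, datadir_path, bitcoind_warning_var)
reactor.listenTCP(9332, server.Site(root))  # e.g. GET /local_stats or /web/graph_data/local_hash_rate/last_hour
reactor.run()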
Code example #5
    def __init__(self, node, my_pubkey_hash, donation_percentage, worker_fee,
                 args, pubkeys, dcrd):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.dcrd = dcrd
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(dict(
                forest.AttributeDelta.attrs,
                my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
                my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
                my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
                my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
            )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa']  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.node.dcrd_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(decred_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    decred_data.hash256(decred_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=decred_data.hash256(
                        decred_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 300,  # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=decred_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(
                        self.node.dcrd_work.value['height']),
                    last_update=self.node.dcrd_work.value['last_update'],
                )

            self.current_work.set(t)

        self.node.dcrd_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
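
compute_work above is never called directly by work consumers; it is re-run through watchers whenever dcrd_work or best_block_header changes, and downstream code watches current_work in turn. A minimal stand-in for that observer pattern (this Variable is a simplified sketch, not p2pool's util.variable implementation, whose changed/transitioned events carry different arguments):

import sys

class Variable(object):
    # Simplified stand-in: holds a value and notifies watchers on every set().
    def __init__(self, value):
        self.value = value
        self._watchers = []

    def watch(self, func):
        self._watchers.append(func)

    def set(self, value):
        old, self.value = self.value, value
        for func in self._watchers:
            func(old, value)

work = Variable(None)
work.watch(lambda old, new: sys.stdout.write('recomputing downstream work\n'))
work.set({'previous_block': 0})  # watchers fire here, like compute_work above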
Code example #6
    def __init__(self, my_pubkey_hash, net, donation_percentage, bitcoind_work,
                 best_block_header, merged_urls, best_share_var, tracker,
                 my_share_hashes, my_doa_share_hashes, worker_fee, p2p_node,
                 submit_block, set_best_share, broadcast_share,
                 block_height_var):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.my_pubkey_hash = my_pubkey_hash
        self.net = net
        self.donation_percentage = donation_percentage
        self.bitcoind_work = bitcoind_work
        self.best_block_header = best_block_header
        self.best_share_var = best_share_var
        self.tracker = tracker
        self.my_share_hashes = my_share_hashes
        self.my_doa_share_hashes = my_doa_share_hashes
        self.worker_fee = worker_fee
        self.p2p_node = p2p_node
        self.submit_block = submit_block
        self.set_best_share = set_best_share
        self.broadcast_share = broadcast_share
        self.block_height_var = block_height_var

        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        @tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and tracker.is_child_of(share.hash, self.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa']  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.tracker.is_child_of(share.hash, self.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while True:
                auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
                self.merged_work.set(dict(self.merged_work.value, **{auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
            t = self.bitcoind_work.value
            bb = self.best_block_header.value
            if bb is not None and bb['previous_block'] == t['previous_block'] and net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600,  # better way?
                    transactions=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=net.PARENT.SUBSIDY_FUNC(
                        self.block_height_var.value),
                    last_update=self.bitcoind_work.value['last_update'],
                )

            self.current_work.set(t)

        self.bitcoind_work.changed.watch(lambda _: compute_work())
        self.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
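
The transitioned watcher above gates long polls so miners are only interrupted for meaningful changes. The same test, factored into a small predicate for clarity (should_trigger_longpoll is an illustrative helper, not a p2pool function):

def should_trigger_longpoll(before, after):
    # Fire when a header-level field changed, or when the transaction set
    # went from empty to non-empty - mirroring the watcher above.
    if any(before[k] != after[k] for k in ('version', 'previous_block', 'bits')):
        return True
    return not before['transactions'] and bool(after['transactions'])

assert should_trigger_longpoll(
    dict(version=2, previous_block=0, bits=0x1d00ffff, transactions=[]),
    dict(version=2, previous_block=1, bits=0x1d00ffff, transactions=[]))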
Code example #7
File: main.py Project: alaudidae/p2pool
@defer.inlineCallbacks  # required: the body below yields deferreds throughout
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__, )
        print

        traffic_happened = variable.Event()

        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (
                args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port,
                               factory)
            yield factory.getProtocol()  # waits until handshake is successful
            print '    ...success!'
            print
            defer.returnValue(factory)

        if args.testnet:  # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http',
                               args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (
            url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(
            url,
            dict(Authorization='Basic ' +
                 base64.b64encode(args.bitcoind_rpc_username + ':' +
                                  args.bitcoind_rpc_password)),
            timeout=30)

        @deferral.retry('Error while checking Bitcoin connection:', 1)
        @defer.inlineCallbacks
        def check():
            if not (yield net.PARENT.RPC_CHECK(bitcoind)):
                print >> sys.stderr, "    Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!"
                raise deferral.RetrySilentlyException()
            if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']):
                print >> sys.stderr, '    Bitcoin version too old! Upgrade to 0.6.4 or newer!'
                raise deferral.RetrySilentlyException()

        yield check()
        temp_work = yield getwork(bitcoind)

        if not args.testnet:
            factory = yield connect_p2p()

        block_height_var = variable.Variable(None)

        @defer.inlineCallbacks
        def poll_height():
            block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)()))

        yield poll_height()
        task.LoopingCall(poll_height).start(60 * 60)

        bitcoind_warning_var = variable.Variable(None)

        @defer.inlineCallbacks
        def poll_warnings():
            errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None)

        yield poll_warnings()
        task.LoopingCall(poll_warnings).start(20 * 60)

        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'], )
        print '    Current block height: %i' % (block_height_var.value, )
        print

        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')

            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address, )
            else:
                address = None

            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print '    Cached address is either invalid or not controlled by local bitcoind!'
                    address = None

            if address is None:
                print '    Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()

            with open(address_path, 'wb') as f:
                f.write(address)

            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print

        my_share_hashes = set()
        my_doa_share_hashes = set()

        tracker = p2pool_data.OkayTracker(net, my_share_hashes,
                                          my_doa_share_hashes)
        shared_share_hashes = set()
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                if contents.hash in tracker.items:
                    continue
                shared_share_hashes.add(contents.hash)
                contents.time_seen = 0
                tracker.add(contents)
                if len(tracker.items) % 1000 == 0 and tracker.items:
                    print "    %i" % (len(tracker.items), )
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...inserting %i verified shares..." % (
            len(known_verified), )
        for h in known_verified:
            if h not in tracker.items:
                ss.forget_verified_share(h)
                continue
            tracker.verified.add(tracker.items[h])
        print "    ...done loading %i shares!" % (len(tracker.items), )
        print
        tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        tracker.verified.removed.watch(
            lambda share: ss.forget_verified_share(share.hash))
        tracker.removed.watch(
            lambda share: shared_share_hashes.discard(share.hash))

        print 'Initializing work...'

        # BITCOIND WORK

        bitcoind_work = variable.Variable((yield getwork(bitcoind)))

        @defer.inlineCallbacks
        def work_poller():
            while True:
                flag = factory.new_block.get_deferred()
                try:
                    bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)

        work_poller()

        # PEER WORK

        best_block_header = variable.Variable(None)

        def handle_header(new_header):
            # check that header matches current target
            if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target):
                return
            bitcoind_best_block = bitcoind_work.value['previous_block']
            if (best_block_header.value is None
                    or (new_header['previous_block'] == bitcoind_best_block and
                        bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block)  # new is child of current and previous is current
                    or (bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
                        best_block_header.value['previous_block'] != bitcoind_best_block)):  # new is current and previous is not a child of current
                best_block_header.set(new_header)

        @defer.inlineCallbacks
        def poll_header():
            handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block'])))

        bitcoind_work.changed.watch(lambda _: poll_header())
        yield deferral.retry('Error while requesting best block header:')(poll_header)()

        # BEST SHARE

        get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(
            bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net)

        best_share_var = variable.Variable(None)
        desired_var = variable.Variable(None)

        def set_best_share():
            best, desired = tracker.think(
                get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits'])

            best_share_var.set(best)
            desired_var.set(desired)

        bitcoind_work.changed.watch(lambda _: set_best_share())
        set_best_share()

        print '    ...success!'
        print

        # setup p2p logic and join p2pool network

        class Node(p2p.Node):
            def handle_shares(self, shares, peer):
                if len(shares) > 5:
                    print 'Processing %i shares from %s...' % (
                        len(shares), '%s:%i' % peer.addr if peer is not None else None)

                new_count = 0
                for share in shares:
                    if share.hash in tracker.items:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue

                    new_count += 1

                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)

                    tracker.add(share)

                if new_count:
                    set_best_share()

                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (
                        len(shares), new_count, len(tracker.items), 2 * net.CHAIN_LENGTH)

            @defer.inlineCallbacks
            def handle_share_hashes(self, hashes, peer):
                new_hashes = [x for x in hashes if x not in tracker.items]
                if not new_hashes:
                    return
                try:
                    shares = yield peer.get_shares(
                        hashes=new_hashes,
                        parents=0,
                        stops=[],
                    )
                except:
                    log.err(None, 'in handle_share_hashes:')
                else:
                    self.handle_shares(shares, peer)

            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000 // len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(
                            share_hash,
                            min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (
                    len(shares), peer.addr[0], peer.addr[1])
                return shares

            def handle_bestblock(self, header, peer):
                if net.PARENT.POW_FUNC(
                        bitcoin_data.block_header_type.pack(
                            header)) > header['bits'].target:
                    raise p2p.PeerMisbehavingError(
                        'received block header fails PoW test')
                handle_header(header)

        @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
        def submit_block_p2p(block):
            if factory.conn.value is None:
                print >> sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (
                    net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                    bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(block['header'])))
                raise deferral.RetrySilentlyException()
            factory.conn.value.send_block(block=block)

        @deferral.retry('Error submitting block: (will retry)', 10, 10)
        @defer.inlineCallbacks
        def submit_block_rpc(block, ignore_failure):
            if bitcoind_work.value['use_getblocktemplate']:
                result = yield bitcoind.rpc_submitblock(
                    bitcoin_data.block_type.pack(block).encode('hex'))
                success = result is None
            else:
                result = yield bitcoind.rpc_getmemorypool(
                    bitcoin_data.block_type.pack(block).encode('hex'))
                success = result
            success_expected = net.PARENT.POW_FUNC(
                bitcoin_data.block_header_type.pack(
                    block['header'])) <= block['header']['bits'].target
            if (not success and success_expected
                    and not ignore_failure) or (success
                                                and not success_expected):
                print >> sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (
                    success, result, success_expected)

        def submit_block(block, ignore_failure):
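            # submit via both paths: p2p is lower-latency, while RPC returns a
            # result we can sanity-check; duplicate submissions are harmless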
            submit_block_p2p(block)
            submit_block_rpc(block, ignore_failure)

        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                submit_block(share.as_block(tracker), ignore_failure=True)
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (
                    p2pool_data.format_hash(share.hash),
                    net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print

                def spread():
                    if (get_height_rel_highest(share.header['previous_block'])
                            > -5 or bitcoind_work.value['previous_block'] in [
                                share.header['previous_block'],
                                share.header_hash
                            ]):
                        broadcast_share(share.hash)

                spread()
                reactor.callLater(
                    5, spread)  # so get_height_rel_highest can update

        print 'Joining p2pool network using port %i...' % (args.p2pool_port, )

        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))

        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(
                        dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >> sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()

        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()

        p2p_node = Node(
            best_share_hash_func=lambda: best_share_var.value,
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            max_incoming_conns=args.p2pool_conns,
            traffic_happened=traffic_happened,
        )
        p2p_node.start()

        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(p2p_node.addr_store.items()))

        task.LoopingCall(save_addrs).start(60)

        @best_block_header.changed.watch
        def _(header):
            for peer in p2p_node.peers.itervalues():
                peer.send_bestblock(header=header)

        @defer.inlineCallbacks
        def broadcast_share(share_hash):
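            # push up to the 5 most recent not-yet-shared ancestors of this
            # share to every peer, skipping the peer each share came from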
            shares = []
            for share in tracker.get_chain(
                    share_hash, min(5, tracker.get_height(share_hash))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)

            for peer in list(p2p_node.peers.itervalues()):
                yield peer.sendShares(
                    [share for share in shares if share.peer is not peer])

        # send share when the chain changes to their chain
        best_share_var.changed.watch(broadcast_share)

        def save_shares():
            for share in tracker.get_chain(
                    best_share_var.value,
                    min(tracker.get_height(best_share_var.value),
                        2 * net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.items:
                    ss.add_verified_hash(share.hash)

        task.LoopingCall(save_shares).start(60)

        @apply
        @defer.inlineCallbacks
        def download_shares():
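            # @apply above invokes this immediately, so the loop starts at
            # definition time: wait until desired_var reports missing
            # ancestors, then ask a random peer for that share plus 500 parents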
            while True:
                desired = yield desired_var.get_when_satisfies(
                    lambda val: len(val) != 0)
                peer2, share_hash = random.choice(desired)

                if len(p2p_node.peers) == 0:
                    yield deferral.sleep(1)
                    continue
                peer = random.choice(p2p_node.peers.values())

                print 'Requesting parent share %s from %s' % (
                    p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                try:
                    shares = yield peer.get_shares(
                        hashes=[share_hash],
                        parents=500,
                        stops=[],
                    )
                except:
                    log.err(None, 'in download_shares:')
                    continue

                if not shares:
                    # sleep so we don't keep re-requesting a share nobody has
                    yield deferral.sleep(1)
                    continue
                p2p_node.handle_shares(shares, peer)

        print '    ...success!'
        print

        if args.upnp:

            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(
                                lan_ip, args.p2pool_port, args.p2pool_port,
                                'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1 / 120))

            upnp_thread()

        # start listening for workers with a JSON-RPC server

        print 'Listening for workers on %r port %i...' % (worker_endpoint[0],
                                                          worker_endpoint[1])

        get_current_txouts = lambda: p2pool_data.get_expected_payouts(
            tracker, best_share_var.value, bitcoind_work.value['bits'].target,
            bitcoind_work.value['subsidy'], net)

        wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage,
                               bitcoind_work, best_block_header, merged_urls,
                               best_share_var, tracker, my_share_hashes,
                               my_doa_share_hashes, args.worker_fee, p2p_node,
                               submit_block, set_best_share, broadcast_share,
                               block_height_var)
        web_root = web.get_web_root(
            tracker, bitcoind_work, get_current_txouts, datadir_path, net,
            wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor,
            args.worker_fee, p2p_node, wb.my_share_hashes,
            wb.pseudoshare_received, wb.share_received, best_share_var,
            bitcoind_warning_var, traffic_happened)
        worker_interface.WorkerInterface(wb).attach_to(
            web_root, get_handler=lambda request: request.redirect('/static/'))

        deferral.retry('Error binding to worker port:', traceback=False)(
            reactor.listenTCP)(worker_endpoint[1],
                               server.Site(web_root),
                               interface=worker_endpoint[0])

        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass

        print '    ...success!'
        print

        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (
            worker_endpoint[1], )
        if args.donation_percentage > 0.51:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (
                args.donation_percentage, )
        elif args.donation_percentage < 0.49:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (
                args.donation_percentage, )
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (
                args.donation_percentage, )
            print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print

        if hasattr(signal, 'SIGALRM'):
            signal.signal(
                signal.SIGALRM,
                lambda signum, frame: reactor.callFromThread(
                    sys.stderr.write,
                    'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)

        if args.irc_announce:
            from twisted.words.protocols import irc

            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100), )
                channel = net.ANNOUNCE_CHANNEL

                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)

                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)

                    @defer.inlineCallbacks
                    def new_share(share):
                        if share.pow_hash <= share.header[
                                'bits'].target and abs(share.timestamp -
                                                       time.time()) < 10 * 60:
                            yield deferral.sleep(random.expovariate(1 / 60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (
                                net.NAME.upper(),
                                bitcoin_data.script2_to_address(
                                    share.new_script, net.PARENT),
                                net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                                share.header_hash)
                            if all('%x' %
                                   (share.header_hash, ) not in old_message
                                   for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)

                    self.watch_id = tracker.verified.added.watch(new_share)
                    self.recent_messages = []

                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)

                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)

                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()

            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient

            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())

        @defer.inlineCallbacks
        def status_thread():
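            # assemble a status summary every 3 s, but only reprint it when it
            # changes or 15 s have elapsed (checked at the bottom of the loop)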
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = tracker.get_height(best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.items),
                        len(tracker.items),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues()
                            if peer.incoming),
                    ) + (' FDs: %i R/%i W' %
                         (len(reactor.getReaders()), len(reactor.getWriters()))
                         if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work'] / dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(
                            sum(1 for datum in datums if datum['dead']),
                            len(datums), 0.95),
                        math.format_dt(2**256 / tracker.items[
                            best_share_var.value].max_target / my_att_s)
                        if my_att_s and best_share_var.value else '???',
                    )

                    if height > 2:
                        (stale_orphan_shares,
                         stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(
                            tracker, best_share_var.value,
                            min(60 * 60 // net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(
                            tracker, best_share_var.value,
                            min(height - 1, 60 * 60 //
                                net.SHARE_PERIOD)) / (1 - stale_prop)

                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares,
                            stale_orphan_shares,
                            stale_doa_shares,
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95),
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95, lambda x: (1 - x) / (1 - stale_prop)),
                            get_current_txouts().get(
                                bitcoin_data.pubkey_hash_to_script2(
                                    my_pubkey_hash), 0) * 1e-8,
                            net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100 * stale_prop,
                            math.format_dt(
                                2**256 / bitcoind_work.value['bits'].target /
                                real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(
                                tracker, best_share_var.value, net,
                                bitcoind_warning_var.value,
                                bitcoind_work.value):
                            print >> sys.stderr, '#' * 40
                            print >> sys.stderr, '>>> Warning: ' + warning
                            print >> sys.stderr, '#' * 40

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()

        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
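The control flow in the example above is almost entirely event-driven: values such as bitcoind_work, best_block_header, best_share_var and desired_var are wrapped in p2pool's Variable objects, and every `.changed.watch(...)` / `.set(...)` pair goes through them. As a rough sketch only (a simplified stand-in for p2pool's util/variable.py, which also provides get_when_satisfies and Deferred integration not shown here), the pattern boils down to:

import sys

# Simplified stand-ins for p2pool's Event/Variable, for illustration only.
class Event(object):
    def __init__(self):
        self.watchers = {}
        self.next_id = 0

    def watch(self, func):
        # register func; the returned id can be passed to unwatch() later,
        # as the IRC client above does with tracker.verified.added
        watch_id = self.next_id
        self.next_id += 1
        self.watchers[watch_id] = func
        return watch_id

    def unwatch(self, watch_id):
        del self.watchers[watch_id]

    def happened(self, *args):
        for func in list(self.watchers.values()):
            func(*args)

class Variable(object):
    def __init__(self, value):
        self.value = value
        self.changed = Event()        # fires with the new value
        self.transitioned = Event()   # fires with (old, new), as in work.py

    def set(self, value):
        if value == self.value:
            return                    # no-op sets don't wake the watchers
        old, self.value = self.value, value
        self.changed.happened(value)
        self.transitioned.happened(old, value)

# usage mirroring the excerpt:
work = Variable(None)
work.changed.watch(lambda new_work: sys.stdout.write('recomputing best share\n'))
work.set({'height': 1})  # fires the watcher
work.set({'height': 1})  # equal value: watcher not called again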
Code example #8
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls,
                 worker_fee, min_difficulty, share_rate, share_rate_type, args,
                 pubkeys, bitcoind):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []

        self.node = node

        self.bitcoind = bitcoind
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = donation_percentage
        self.worker_fee = worker_fee
        self.min_difficulty = min_difficulty
        self.share_rate = share_rate
        self.share_rate_type = share_rate_type

        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10 * 60)
        self.local_addr_rate_monitor = math.RateMonitor(10 * 60)

        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)

        self.last_work_shares = variable.Variable({})
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        self.tracker_view = forest.TrackerView(
            self.node.tracker,
            forest.get_attributedelta_type(
                dict(
                    forest.AttributeDelta.attrs,
                    my_count=lambda share: 1
                    if share.hash in self.my_share_hashes else 0,
                    my_doa_count=lambda share: 1
                    if share.hash in self.my_doa_share_hashes else 0,
                    my_orphan_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'orphan' else 0,
                    my_dead_announce_count=lambda share: 1
                    if share.hash in self.my_share_hashes and share.share_data[
                        'stale_info'] == 'doa' else 0,
                )))

        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [
                    None, 'orphan', 'doa'
                ]  # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] +
                    (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] +
                    (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(
                    share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(
                    self.removed_doa_unstales_var.value + 1)

        # MERGED WORK

        self.merged_work = variable.Variable({})

        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
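            # poll this merged-mining daemon's getauxblock once a second and
            # publish its aux work into merged_work, keyed by chain id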
            merged_proxy = jsonrpc.HTTPProxy(
                merged_url,
                dict(Authorization='Basic ' +
                     base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry(
                    'Error while calling merged getauxblock on %s:' %
                    (merged_url, ), 30)(merged_proxy.rpc_getauxblock)()
                target = auxblock[
                    'target'] if 'target' in auxblock else auxblock['_target']
                self.merged_work.set(
                    math.merge_dicts(
                        self.merged_work.value, {
                            auxblock['chainid']:
                            dict(
                                hash=int(auxblock['hash'], 16),
                                target='p2pool' if target == 'p2pool' else
                                pack.IntType(256).unpack(target.decode('hex')),
                                merged_proxy=merged_proxy,
                            )
                        }))
                yield deferral.sleep(1)

        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)

        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'

        # COMBINE WORK

        self.current_work = variable.Variable(None)

        def compute_work():
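            # if peers announced a valid block that bitcoind hasn't served via
            # getblocktemplate yet, mine an empty template on top of it rather
            # than keep working on the now-stale previous block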
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t[
                    'previous_block'] and self.node.net.PARENT.POW_FUNC(
                        bitcoin_data.block_header_type.pack(
                            bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (
                    bb['previous_block'],
                    bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(
                        bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'],  # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=max(int(time.time() + 0.5), bb['timestamp'] + 1),
                    transactions=[],
                    transaction_hashes=[],
                    transaction_fees=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(
                        self.node.bitcoind_work.value['height']),
                    last_update=t['last_update'],
                    skipping=self.current_work.value.get('skipping', 3) -
                    1 if self.current_work.value is not None else 2)

            self.current_work.set(t)

        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()

        self.new_work_event = variable.Event()

        @self.current_work.transitioned.watch
        @defer.inlineCallbacks  # the body below yields Deferreds
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x]
                   for x in ['version', 'previous_block', 'bits']) or (
                       not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
            # refetch block template if best block changed
            if after.get('skipping', -2) >= 0:
                time.sleep(0.5)
                self.node.bitcoind_work.set((yield helper.getwork(
                    self.bitcoind,
                    self.node.bitcoind_work.value['use_getblocktemplate'])))
            elif after.get('skipping', -2) == -1:
                # revert if bitcoind doesn't have the new block
                h = yield self.bitcoind.rpc_getblockheader(
                    (yield self.bitcoind.rpc_getbestblockhash()))
                self.node.best_block_header.set(
                    dict(version=h['version'],
                         previous_block=int(h['previousblockhash'], 16),
                         merkle_root=int(h['merkleroot'], 16),
                         timestamp=h['time'],
                         bits=bitcoin_data.FloatingIntegerType().unpack(
                             h['bits'].decode('hex')[::-1]) if isinstance(
                                 h['bits'], (str, unicode)) else
                         bitcoin_data.FloatingInteger(h['bits']),
                         nonce=h['nonce']))

        self.merged_work.changed.watch(
            lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(
            lambda _: self.new_work_event.happened())
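set_merged_work above shows the polling idiom this codebase uses throughout: an @defer.inlineCallbacks generator that yields a Deferred at each asynchronous step, so the loop reads sequentially but never blocks the reactor. A minimal self-contained sketch of the same idiom (the sleep helper below is an assumed equivalent of p2pool's deferral.sleep, and the flat 30 s backoff merely approximates what deferral.retry does):

from twisted.internet import defer, reactor

def sleep(seconds):
    # a Deferred that fires after a delay; assumed stand-in for deferral.sleep
    d = defer.Deferred()
    reactor.callLater(seconds, d.callback, None)
    return d

@defer.inlineCallbacks
def poll_loop(fetch, publish, running=lambda: True):
    # fetch() must return a Deferred (e.g. merged_proxy.rpc_getauxblock());
    # publish(result) stores the result, e.g. merging it into a Variable
    while running():
        try:
            result = yield fetch()
        except Exception:
            yield sleep(30)  # crude stand-in for deferral.retry's backoff
            continue
        publish(result)
        yield sleep(1)       # matches the one-second pause between polls above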
Code example #9
File: work.py Project: kissxrl/p2pool_scchain
    def __init__(self, node, my_pubkey_hash, donation_percentage, merged_urls, worker_fee, args, pubkeys, bitcoind):
        worker_interface.WorkerBridge.__init__(self)
        self.recent_shares_ts_work = []
        
        self.node = node

        self.bitcoind = bitcoind
        self.pubkeys = pubkeys
        self.args = args
        self.my_pubkey_hash = my_pubkey_hash

        self.donation_percentage = args.donation_percentage
        self.worker_fee = args.worker_fee
        
        self.net = self.node.net.PARENT
        self.running = True
        self.pseudoshare_received = variable.Event()
        self.share_received = variable.Event()
        self.local_rate_monitor = math.RateMonitor(10*60)
        self.local_addr_rate_monitor = math.RateMonitor(10*60)
        
        self.removed_unstales_var = variable.Variable((0, 0, 0))
        self.removed_doa_unstales_var = variable.Variable(0)
        
        self.last_work_shares = variable.Variable( {} )
        self.my_share_hashes = set()
        self.my_doa_share_hashes = set()

        self.address_throttle = 0

        # DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
        # print data.script2_to_address(DONATION_SCRIPT, node.net.PARENT)
        # print data.address_to_pubkey_hash("1E482inuE9GckE6kXoX5sBCTD7g4rgGgfN",node.net.PARENT)


        self.tracker_view = forest.TrackerView(self.node.tracker, forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
            my_count=lambda share: 1 if share.hash in self.my_share_hashes else 0,
            my_doa_count=lambda share: 1 if share.hash in self.my_doa_share_hashes else 0,
            my_orphan_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'orphan' else 0,
            my_dead_announce_count=lambda share: 1 if share.hash in self.my_share_hashes and share.share_data['stale_info'] == 'doa' else 0,
        )))
        
        @self.node.tracker.verified.removed.watch
        def _(share):
            if share.hash in self.my_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                assert share.share_data['stale_info'] in [None, 'orphan', 'doa'] # we made these shares in this instance
                self.removed_unstales_var.set((
                    self.removed_unstales_var.value[0] + 1,
                    self.removed_unstales_var.value[1] + (1 if share.share_data['stale_info'] == 'orphan' else 0),
                    self.removed_unstales_var.value[2] + (1 if share.share_data['stale_info'] == 'doa' else 0),
                ))
            if share.hash in self.my_doa_share_hashes and self.node.tracker.is_child_of(share.hash, self.node.best_share_var.value):
                self.removed_doa_unstales_var.set(self.removed_doa_unstales_var.value + 1)
        
        # MERGED WORK
        
        self.merged_work = variable.Variable({})
        
        @defer.inlineCallbacks
        def set_merged_work(merged_url, merged_userpass):
            merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
            while self.running:
                auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
                target = auxblock['target'] if 'target' in auxblock else auxblock['_target']
                self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
                    hash=int(auxblock['hash'], 16),
                    target='p2pool' if target == 'p2pool' else pack.IntType(256).unpack(target.decode('hex')),
                    merged_proxy=merged_proxy,
                )}))
                yield deferral.sleep(1)
        for merged_url, merged_userpass in merged_urls:
            set_merged_work(merged_url, merged_userpass)
        
        @self.merged_work.changed.watch
        def _(new_merged_work):
            print 'Got new merged mining work!'
        
        # COMBINE WORK
        
        self.current_work = variable.Variable(None)
        def compute_work():
            t = self.node.bitcoind_work.value
            bb = self.node.best_block_header.value
            if bb is not None and bb['previous_block'] == t['previous_block'] and self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(bb)) <= t['bits'].target:
                print 'Skipping from block %x to block %x!' % (bb['previous_block'],
                    bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)))
                t = dict(
                    version=bb['version'],
                    previous_block=bitcoin_data.hash256(bitcoin_data.block_header_type.pack(bb)),
                    bits=bb['bits'], # not always true
                    coinbaseflags='',
                    height=t['height'] + 1,
                    time=bb['timestamp'] + 600, # better way?
                    transactions=[],
                    transaction_fees=[],
                    merkle_link=bitcoin_data.calculate_merkle_link([None], 0),
                    subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.node.bitcoind_work.value['height']),
                    last_update=self.node.bitcoind_work.value['last_update'],
                )
            
            self.current_work.set(t)
        self.node.bitcoind_work.changed.watch(lambda _: compute_work())
        self.node.best_block_header.changed.watch(lambda _: compute_work())
        compute_work()
        
        self.new_work_event = variable.Event()
        @self.current_work.transitioned.watch
        def _(before, after):
            # trigger LP if version/previous_block/bits changed or transactions changed from nothing
            if any(before[x] != after[x] for x in ['version', 'previous_block', 'bits']) or (not before['transactions'] and after['transactions']):
                self.new_work_event.happened()
        self.merged_work.changed.watch(lambda _: self.new_work_event.happened())
        self.node.best_share_var.changed.watch(lambda _: self.new_work_event.happened())
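A closing note on the arithmetic behind status_thread's "Expected time to share" and "Expected time to block" lines in the first example: one hash attempt succeeds against target t with probability t / 2**256, so at R attempts per second the expected wait is 2**256 / t / R seconds. Expressed directly (illustrative helper, not from the source):

# Expected seconds until one hash attempt lands below `target`, given `rate`
# attempts per second: the reciprocal of the per-second success rate.
def expected_wait_seconds(target, rate):
    return 2**256 / float(target) / rate

# e.g. the 'Expected time to share' figure is this applied to the share
# chain's max_target and the local miner's measured attempt rate.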