Example 1
    def paytotal(self):
        # Total pending payout, in coins, across all locally configured pubkeys.
        self.payouttotal = 0.0
        for pubkey_hash in pubkeys.keys:
            self.payouttotal += node.get_current_txouts().get(
                mue_data.pubkey_hash_to_script2(pubkey_hash), 0) * 1e-8
        return self.payouttotal
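
The pattern above, looking up each pubkey's output script in the map returned by node.get_current_txouts() and scaling the satoshi amount by 1e-8, recurs in every example below. A minimal standalone sketch of that pattern; the helper name and parameters are invented for illustration:

    def pending_payout(current_txouts, pubkey_hashes, to_script):
        # current_txouts: dict mapping output script -> pending amount in satoshis
        # to_script: script builder, e.g. mue_data.pubkey_hash_to_script2
        return sum(current_txouts.get(to_script(pk), 0)
                   for pk in pubkey_hashes) * 1e-8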
Example 2
    def add_point():
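        # Sample pool and miner statistics and record them in the hd datastreams.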
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH,
                         60 * 60 // node.net.SHARE_PERIOD,
                         node.tracker.get_height(node.best_share_var.value))
        t = time.time()

        pool_rates = p2pool_data.get_stale_counts(node.tracker,
                                                  node.best_share_var.value,
                                                  lookbehind,
                                                  rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)

        current_txouts = node.get_current_txouts()
        my_current_payouts = 0.0
        for add in wb.pubkeys.keys:
            my_current_payouts += current_txouts.get(
                mue_data.pubkey_hash_to_script2(add), 0) * 1e-8
        hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        current_txouts_by_address = dict(
            (mue_data.script2_to_address(script, node.net.PARENT), amount)
            for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(
            t,
            dict((user, current_txouts_by_address[user] * 1e-8)
                 for user in miner_hash_rates
                 if user in current_txouts_by_address))

        hd.datastreams['peers'].add_datum(
            t,
            dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if not peer.incoming),
            ))

        vs = p2pool_data.get_desired_version_counts(node.tracker,
                                                    node.best_share_var.value,
                                                    lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_version_rates'].add_datum(
            t,
            dict((str(k), v / vs_total * pool_total)
                 for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
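
The desired_version_rates datum splits the total pool rate across share versions in proportion to their counts over the lookbehind window. A hedged sketch of that proportional split; the function and argument names are illustrative only:

    def version_rates(version_counts, pool_total):
        # version_counts: dict mapping share version -> count (or weight)
        total = float(sum(version_counts.values()))
        return dict((str(version), count / total * pool_total)
                    for version, count in version_counts.items())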
Example 3
    def update_stat_log():
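        # Keep a rolling 24-hour window of snapshots and persist it to the 'stats' file.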
        while stat_log and stat_log[0]['time'] < time.time() - 24 * 60 * 60:
            stat_log.pop(0)

        lookbehind = 3600 // node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None

        global_stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares,
         stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        my_current_payout = 0.0
        for add in wb.pubkeys.keys:
            my_current_payout += node.get_current_txouts().get(
                mue_data.pubkey_hash_to_script2(add), 0) * 1e-8

        stat_log.append(
            dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(
                    node.tracker, node.best_share_var.value, lookbehind) /
                (1 - global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares,
                                            doa=stale_doa_shares),
                current_payout=my_current_payout,
                peers=dict(
                    incoming=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if peer.incoming),
                    outgoing=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if not peer.incoming),
                ),
                attempts_to_share=mue_data.target_to_average_attempts(
                    node.tracker.items[node.best_share_var.value].max_target),
                attempts_to_block=mue_data.target_to_average_attempts(
                    node.mued_work.value['bits'].target),
                block_value=node.mued_work.value['subsidy'] * 1e-8,
            ))

        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
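
The pool_hash_rate entry divides the rate observed from chained shares by (1 - global_stale_prop): orphan and dead-on-arrival shares are real work that never entered the share chain, so the raw figure underestimates the pool. An illustrative sketch of that correction:

    def corrected_hash_rate(chain_attempts_per_second, stale_prop):
        # stale_prop is the estimated fraction of work lost to orphan/DOA shares,
        # so dividing by (1 - stale_prop) recovers the total work performed
        return chain_attempts_per_second / (1 - stale_prop)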
Example 4
        def status_thread():
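            # Build a console status line every few seconds and print it when it
            # changes or at least every 15 seconds.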
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues()
                            if peer.incoming),
                    ) + (' FDs: %i R/%i W' %
                         (len(reactor.getReaders()), len(reactor.getWriters()))
                         if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work'] / dt for datum in datums)
                    my_shares_per_s = sum(datum['work'] / dt /
                                          mue_data.target_to_average_attempts(
                                              datum['share_target'])
                                          for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(
                            sum(1 for datum in datums if datum['dead']),
                            len(datums), 0.95),
                        math.format_dt(1 / my_shares_per_s)
                        if my_shares_per_s else '???',
                    )

                    if height > 2:
                        (stale_orphan_shares,
                         stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(
                            node.tracker, node.best_share_var.value,
                            min(60 * 60 // net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(
                            node.tracker, node.best_share_var.value,
                            min(height - 1, 60 * 60 //
                                net.SHARE_PERIOD)) / (1 - stale_prop)

                        paystr = ''
                        paytot = 0.0
                        for i in xrange(len(pubkeys.keys)):
                            curtot = node.get_current_txouts().get(
                                mue_data.pubkey_hash_to_script2(
                                    pubkeys.keys[i]), 0)
                            paytot += curtot * 1e-8
                            paystr += "(%.4f)" % (curtot * 1e-8, )
                        paystr += "=%.4f" % (paytot, )
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % (
                            shares,
                            stale_orphan_shares,
                            stale_doa_shares,
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95),
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95, lambda x: (1 - x) / (1 - stale_prop)),
                            paystr,
                            net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100 * stale_prop,
                            math.format_dt(
                                2**256 / node.mued_work.value['bits'].target /
                                real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(
                                node.tracker, node.best_share_var.value, net,
                                mued_getinfo_var.value, node.mued_work.value):
                            print >> sys.stderr, '#' * 40
                            print >> sys.stderr, '>>> Warning: ' + warning
                            print >> sys.stderr, '#' * 40

                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (
                                len(gc.garbage), map(type, gc.garbage))

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
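
The expected-time-to-block figure printed above is the average number of hashes needed to meet the block target, roughly 2**256 / target, divided by the corrected pool hash rate. An illustrative sketch using the same approximation as the status string:

    def expected_time_to_block(block_target, pool_attempts_per_second):
        # average attempts needed to find a block at this target, divided by
        # the pool's hash rate, gives the expected time in seconds
        return 2**256 / block_target / pool_attempts_per_second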