Example #1
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print '''    ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print '    ...success!'
            print
            defer.returnValue(factory)
        
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
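        # bitcoind's JSON-RPC interface uses HTTP Basic auth: the Authorization
        # header carries base64('user:pass'), e.g. 'Basic dXNlcjpwYXNz' for the
        # hypothetical credentials user/pass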
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net, args)
        temp_work = yield helper.getwork(bitcoind)
        
        bitcoind_getinfo_var = variable.Variable(None)
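        # despite the name, this caches the getnetworkinfo reply; it is handed to
        # the web root and to p2pool_data.get_warnings() for warning display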
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getnetworkinfo:')(bitcoind.rpc_getnetworkinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)
        
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'],)
        print '    Current block height: %i' % (temp_work['height'] - 1,)
        print
        
        if not args.testnet:
            factory = yield connect_p2p()
        
        print 'Determining payout address...'
        pubkeys = keypool()
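        # keypool collects the payout address(es): the static modes below add
        # exactly one, while dynamic mode fills it with args.numaddresses entries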
        if args.pubkey_hash is None and args.address != 'dynamic':
            address_path = os.path.join(datadir_path, 'cached_payout_address')
            
            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address,)
            else:
                address = None
            
            if address is not None:
                try:
                    # validateaddress is deprecated: its wallet-related fields
                    # (ismine, iswatchonly, pubkey, ...) moved to getaddressinfo in
                    # Bitcoin Core v0.18, so query getaddressinfo and check that the
                    # address is controlled by the local bitcoind
                    res = yield deferral.retry('Error validating cached address:', 5, 1)(lambda: bitcoind.rpc_getaddressinfo(address))()
                    if not res['ismine']:
                        print '    Cached address is not controlled by local bitcoind!'
                        address = None
                except Exception:
                    print '    Cached address is invalid!'
                    address = None
                                
            if address is None:
                print "    Getting payout address from bitcoind labeled 'p2pool'..."
                # assign the label 'p2pool' to your wallet's mining address beforehand
                try:
                    address = yield deferral.retry('Error getting payout address from bitcoind:', 5, 1)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()
                except Exception:
                    print u"\u001b[31m    Bitcoind has no 'p2pool'-labeled address! Please specify a default payout address! Exiting...\u001b[0m"
                    exit()

                print "    Overwriting cached address with bitcoind's 'p2pool'-labeled address"
                with open(address_path, 'wb') as f:
                    f.write(address)

            my_address = address
            print u'\u001b[32m    ...success! Payout address: %s\u001b[0m' % my_address
            print

            pubkeys.addkey({'address': my_address})
        elif args.address != 'dynamic':
            my_address = args.address
            print '    ...success! Payout address: %s' % my_address
            print

            pubkeys.addkey({'address': my_address})
        else:
            print '    Entering dynamic address mode.'

            if args.numaddresses < 2:
                print '    ERROR: Cannot use fewer than 2 addresses in dynamic mode. Resetting to 2.'
                args.numaddresses = 2
            for i in range(args.numaddresses):
                address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: bitcoind.rpc_getnewaddress('p2pool'))()
                pubkeys.addkey({'address': address})

            pubkeys.updatestamp(time.time())

            my_address = pubkeys.keys[0]['address']

            for i in range(len(pubkeys.keys)):
                print '    ...payout %d: %s' % (i, pubkeys.keys[i]['address'])
        
        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print "    %i" % (len(shares),)
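        # ShareStore replays everything saved under <datadir>/shares.* through
        # share_cb and feeds known-verified hashes to known_verified.add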
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print
        
        
        print 'Initializing work...'
        
        global gnode
        gnode = node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()
        
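        # drop on-disk shares and verified hashes that the freshly started
        # tracker did not accept, and keep the store in sync from now on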
        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        
        def save_shares():
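            # persist the current best chain (capped at 2*CHAIN_LENGTH shares),
            # flagging the ones the tracker has verified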
            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        deferral.RobustLoopingCall(save_shares).start(60)

        if len(shares) > net.CHAIN_LENGTH:
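            # with a full chain loaded, count desired share versions over a
            # CHAIN_LENGTH//10 window ending about 90% of the way back from the
            # tip, and raise the minimum accepted protocol version to match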
            best_share = shares[node.best_share_var.value]
            previous_share = shares[best_share.share_data['previous_share_hash']]
            counts = p2pool_data.get_desired_version_counts(node.tracker, node.tracker.get_nth_parent_hash(previous_share.hash, net.CHAIN_LENGTH*9//10), net.CHAIN_LENGTH//10)
            p2pool_data.update_min_protocol_version(counts, best_share)
        
        print '    ...success!'
        print
        
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(host):
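            # accepts 'host' or 'host:port'; resolves DNS and falls back to the
            # network's default P2P_PORT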
            port = net.P2P_PORT
            if ':' in host:
                host, port_str = host.split(':')
                port = int(port_str)
            defer.returnValue(((yield reactor.resolve(host)), port))
        
        addrs = {}
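        # peer address book: maps (host, port) tuples to metadata triples
        # (stored below as (0, first_seen, last_seen)); serialized to JSON as
        # [[["203.0.113.5", 9333], [0, 1.5e9, 1.5e9]], ...] (hypothetical entry)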
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        node.p2p_node = p2pool_node.P2PNode(node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
            advertise_ip=args.advertise_ip,
            external_ip=args.p2pool_external_ip,
        )
        node.p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))
        deferral.RobustLoopingCall(save_addrs).start(60)
        
        print '    ...success!'
        print
        
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
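                # loop forever: refresh the router's port mapping roughly every
                # two minutes (exponentially jittered) so the lease never expires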
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        wb = work.WorkerBridge(node, my_address, args.donation_percentage,
                               merged_urls, args.worker_fee, args, pubkeys,
                               bitcoind, args.share_rate)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var, static_dir=args.web_static)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)
        
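        # stratum miners open with a JSON object, so their first byte is '{';
        # any other first byte (an HTTP request line) falls through to the web
        # site, letting one worker port serve both protocols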
        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
        
        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass
        
        print '    ...success!'
        print
        
        
        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
            print 'You can increase this amount with the --give-author argument! (or decrease it, if you must)'
        print
        
        
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            if share.VERSION < 34:
                                found_by = bitcoin_data.script2_to_address(
                                    share.new_script, net.ADDRESS_VERSION,
                                    -1, net.PARENT)
                            else:
                                found_by = share.address
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (
                                    net.NAME.upper(),
                                    found_by,
                                    net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                                    share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = node.tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def joined(self, channel):
                    self.in_channel = True
                def left(self, channel):
                    self.in_channel = False
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(30)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
                    )
                    
                    if height > 2:
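                        # pool-wide estimates: stale proportion over the last hour
                        # of shares, and attempts/s corrected for that staleness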
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        
                        paystr = ''
                        paytot = 0.0
                        for i in range(len(pubkeys.keys)):
                            curtot = node.get_current_txouts().get(
                                    pubkeys.keys[i]['address'], 0)
                            paytot += curtot*1e-8
                            paystr += "(%.4f)" % (curtot*1e-8,)
                        paystr += "=%.4f" % (paytot,)
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            paystr, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                        )
                        
                        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> Warning: ' + warning
                            print >>sys.stderr, '#'*40
                        
                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
Example #2
 merged_urls = map(separate_url, args.merged_urls)
 
 if args.logfile is None:
     args.logfile = os.path.join(datadir_path, 'log')
 
 logfile = logging.LogFile(args.logfile)
 pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
 sys.stdout = logging.AbortPipe(pipe)
 sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
 if hasattr(signal, "SIGUSR1"):
     def sigusr1(signum, frame):
         print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
         logfile.reopen()
         print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
     signal.signal(signal.SIGUSR1, sigusr1)
 deferral.RobustLoopingCall(logfile.reopen).start(5)
 
 class ErrorReporter(object):
     def __init__(self):
         self.last_sent = None
     
     def emit(self, eventDict):
         if not eventDict["isError"]:
             return
         
         if self.last_sent is not None and time.time() < self.last_sent + 5:
             return
         self.last_sent = time.time()
         
         if 'failure' in eventDict:
             text = ((eventDict.get('why') or 'Unhandled Error')
Example #3
def get_web_root(wb,
                 datadir_path,
                 bitcoind_getinfo_var,
                 stop_event=variable.Event(),
                 static_dir=None):
    node = wb.node
    start_time = time.time()

    web_root = resource.Resource()

    def get_users():
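        # payout weights over the last (up to) 720 shares, reported as
        # address -> fraction of total weight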
        height, last = node.tracker.get_height_and_last(
            node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(
            node.best_share_var.value, min(height, 720), 65535 * 2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(
                script, node.net.PARENT)] = weights[script] / total_weight
        return res

    def get_current_scaled_txouts(scale, trunc=0):
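        # scales current payouts so they sum to `scale`; outputs below `trunc`
        # are pooled and given to a single weighted-random winner to avoid dust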
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value * scale // total)
                       for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice(
                    (script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(
                results.iteritems())] += int(scale) - sum(results.itervalues())
        return results

    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total) * 1e8)
        trunc = int(float(trunc) * 1e8)
        return json.dumps(
            dict((bitcoin_data.script2_to_address(script, node.net.PARENT),
                  value / 1e8) for script, value in get_current_scaled_txouts(
                      total, trunc).iteritems()
                 if bitcoin_data.script2_to_address(script, node.net.PARENT)
                 is not None))

    def get_global_stats():
        # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value),
                         3600 // node.net.SHARE_PERIOD)

        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(
            node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)
        diff = bitcoin_data.target_to_difficulty(
            wb.current_work.value['bits'].target)

        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate / (1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(
                node.tracker.items[node.best_share_var.value].max_target),
            network_block_difficulty=diff,
            network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD),
        )

    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value),
                         3600 // node.net.SHARE_PERIOD)

        global_stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)

        my_unstale_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                               if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                              if share.hash in wb.my_share_hashes
                              and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(
            node.best_share_var.value, lookbehind)
                           if share.hash in wb.my_share_hashes
                           and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count

        my_stale_prop = my_stale_count / my_share_count if my_share_count != 0 else None

        my_work = sum(
            bitcoin_data.target_to_average_attempts(share.target) for share in
            node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (
            node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(
                node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time

        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        (stale_orphan_shares,
         stale_doa_shares), shares, _ = wb.get_stale_counts()

        miner_last_difficulties = {}
        for addr in wb.last_work_shares.value:
            miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty(
                wb.last_work_shares.value[addr].target)

        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s / (1 - global_stale_prop),
                actual=share_att_s /
                (1 - my_stale_prop) if my_stale_prop is not None else
                0,  # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count /
                my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count /
                my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            miner_last_difficulties=miner_last_difficulties,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares / shares) /
            (1 - global_stale_prop) if shares else
            None,  # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 -
                        (stale_orphan_shares + stale_doa_shares) / shares) /
            (1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(
                node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(
                node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy'] * 1e-8,
            warnings=p2pool_data.get_warnings(node.tracker,
                                              node.best_share_var.value,
                                              node.net,
                                              bitcoind_getinfo_var.value,
                                              node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage / 100,
            version=p2pool.__version__,
            protocol_version=p2p.Protocol.VERSION,
            fee=wb.worker_fee,
        )

    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args

        def getChild(self, child, request):
            return WebInterface(self.func, self.mime_type,
                                self.args + (child, ))

        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(
                json.dumps(res) if self.mime_type ==
                'application/json' else res)

    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)

    web_root.putChild(
        'rate',
        WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(
            node.tracker, node.best_share_var.value, decent_height(
            )) / (1 - p2pool_data.get_average_stale_prop(
                node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild(
        'difficulty',
        WebInterface(lambda: bitcoin_data.target_to_difficulty(
            node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild(
        'user_stales',
        WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(
            ph, node.net.PARENT
        ), prop) for ph, prop in p2pool_data.get_user_stale_props(
            node.tracker, node.best_share_var.value,
            node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild(
        'current_payouts',
        WebInterface(lambda: dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value /
             1e8) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany',
                      WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild(
        'peer_addresses',
        WebInterface(lambda: ' '.join('%s%s' % (
            peer.transport.getPeer().host, ':' + str(peer.transport.getPeer(
            ).port) if peer.transport.getPeer().port != node.net.P2P_PORT else
            '') for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild(
        'peer_txpool_sizes',
        WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer(
        ).host, peer.transport.getPeer().port), peer.remembered_txs_size) for
                                  peer in node.p2p_node.peers.itervalues())))
    web_root.putChild(
        'pings',
        WebInterface(
            defer.inlineCallbacks(lambda: defer.returnValue(
                dict([(a, (yield b)) for a, b in [(
                    '%s:%i' % (peer.transport.getPeer().host,
                               peer.transport.getPeer().port),
                    defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                        min([(yield peer.do_ping().addCallback(
                            lambda x: x / 0.001).addErrback(lambda fail: None))
                             for i in xrange(3)])))()) for peer in list(
                                 node.p2p_node.peers.itervalues())]])))))
    web_root.putChild(
        'peer_versions',
        WebInterface(lambda: dict(
            ('%s:%i' % peer.addr, peer.other_sub_version)
            for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild(
        'payout_addr',
        WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(
            wb.my_pubkey_hash, node.net.PARENT)))
    web_root.putChild(
        'payout_addrs',
        WebInterface(lambda: list(('%s' % bitcoin_data.pubkey_hash_to_address(
            add, node.net.PARENT)) for add in wb.pubkeys.keys)))
    web_root.putChild(
        'recent_blocks',
        WebInterface(lambda: [
            dict(
                ts=s.timestamp,
                hash='%064x' % s.header_hash,
                number=p2pool_data.parse_bip0034(s.share_data['coinbase'])[0],
                share='%064x' % s.hash,
            ) for s in node.tracker.get_chain(
                node.best_share_var.value,
                min(node.tracker.get_height(node.best_share_var.value), node.
                    net.CHAIN_LENGTH)) if s.pow_hash <= s.header['bits'].target
        ]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild(
        'stale_rates',
        WebInterface(
            lambda: p2pool_data.get_stale_counts(node.tracker,
                                                 node.best_share_var.value,
                                                 decent_height(),
                                                 rates=True)))

    new_root = resource.Resource()
    web_root.putChild('web', new_root)

    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')

    def update_stat_log():
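        # keep a rolling 24-hour window: drop stale entries, append a snapshot
        # of pool/local rates and payouts, and rewrite the stats file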
        while stat_log and stat_log[0]['time'] < time.time() - 24 * 60 * 60:
            stat_log.pop(0)

        lookbehind = 3600 // node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None

        global_stale_prop = p2pool_data.get_average_stale_prop(
            node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares,
         stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()

        my_current_payout = 0.0
        for add in wb.pubkeys.keys:
            my_current_payout += node.get_current_txouts().get(
                bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8
        stat_log.append(
            dict(
                time=time.time(),
                pool_hash_rate=p2pool_data.get_pool_attempts_per_second(
                    node.tracker, node.best_share_var.value, lookbehind) /
                (1 - global_stale_prop),
                pool_stale_prop=global_stale_prop,
                local_hash_rates=miner_hash_rates,
                local_dead_hash_rates=miner_dead_hash_rates,
                shares=shares,
                stale_shares=stale_orphan_shares + stale_doa_shares,
                stale_shares_breakdown=dict(orphan=stale_orphan_shares,
                                            doa=stale_doa_shares),
                current_payout=my_current_payout,
                peers=dict(
                    incoming=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if peer.incoming),
                    outgoing=sum(1
                                 for peer in node.p2p_node.peers.itervalues()
                                 if not peer.incoming),
                ),
                attempts_to_share=bitcoin_data.target_to_average_attempts(
                    node.tracker.items[node.best_share_var.value].max_target),
                attempts_to_block=bitcoin_data.target_to_average_attempts(
                    node.bitcoind_work.value['bits'].target),
                block_value=node.bitcoind_work.value['subsidy'] * 1e-8,
            ))

        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))

    x = deferral.RobustLoopingCall(update_stat_log)
    x.start(5 * 60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))

    def get_share(share_hash_str):
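        # JSON-friendly dump of one share: chain links, local bookkeeping, the
        # share's own data, and the embedded block header/generation tx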
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]

        return dict(
            parent='%064x' % share.previous_hash,
            far_parent='%064x' % share.share_info['far_share_hash'],
            children=[
                '%064x' % x for x in sorted(
                    node.tracker.reverse.get(share.hash, set()),
                    key=lambda sh: -len(node.tracker.reverse.get(sh, set())))
            ],  # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time
                if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(
                    share.new_script, node.net.PARENT),
                donation=share.share_data['donation'] / 65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
                absheight=share.absheight,
                abswork=share.abswork,
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(
                        2, '\x00').encode('hex'),
                    value=share.share_data['subsidy'] * 1e-8,
                    last_txout_nonce='%016x' %
                    share.contents['last_txout_nonce'],
                ),
                other_transaction_hashes=[
                    '%064x' % x
                    for x in share.get_other_tx_hashes(node.tracker)
                ],
            ),
        )

    def get_share_address(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        return bitcoin_data.script2_to_address(share.new_script,
                                               node.net.PARENT)

    new_root.putChild(
        'payout_address',
        WebInterface(lambda share_hash_str: get_share_address(share_hash_str)))
    new_root.putChild(
        'share',
        WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild(
        'heads',
        WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild(
        'verified_heads',
        WebInterface(
            lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild(
        'tails',
        WebInterface(lambda: [
            '%064x' % x for t in node.tracker.tails
            for x in node.tracker.reverse.get(t, set())
        ]))
    new_root.putChild(
        'verified_tails',
        WebInterface(lambda: [
            '%064x' % x for t in node.tracker.verified.tails
            for x in node.tracker.verified.reverse.get(t, set())
        ]))
    new_root.putChild(
        'best_share_hash',
        WebInterface(lambda: '%064x' % node.best_share_var.value))
    new_root.putChild(
        'my_share_hashes',
        WebInterface(
            lambda:
            ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes]))
    new_root.putChild(
        'my_share_hashes50',
        WebInterface(lambda: [
            '%064x' % my_share_hash
            for my_share_hash in list(wb.my_share_hashes)[:50]
        ]))

    def get_share_data(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share())

    new_root.putChild(
        'share_data',
        WebInterface(lambda share_hash_str: get_share_data(share_hash_str),
                     'application/octet-stream'))
    new_root.putChild(
        'currency_info',
        WebInterface(lambda: dict(
            symbol=node.net.PARENT.SYMBOL,
            block_explorer_url_prefix=node.net.PARENT.
            BLOCK_EXPLORER_URL_PREFIX,
            address_explorer_url_prefix=node.net.PARENT.
            ADDRESS_EXPLORER_URL_PREFIX,
            tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX,
        )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))

    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60 * 60),
        'last_day': graph.DataViewDescription(300, 60 * 60 * 24),
        'last_week': graph.DataViewDescription(300, 60 * 60 * 24 * 7),
        'last_month': graph.DataViewDescription(300, 60 * 60 * 24 * 30),
        'last_year': graph.DataViewDescription(300, 60 * 60 * 24 * 365.25),
    }
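    # build (or migrate) the persistent graph database from graph_db; streams
    # declared with is_gauge=False appear to accumulate per-bin totals (rates),
    # while the rest sample instantaneous values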
    hd = graph.HistoryDatabase.from_obj(
        {
            'local_hash_rate':
            graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
            'local_dead_hash_rate':
            graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
            'local_share_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions,
                is_gauge=False,
                multivalues=True,
                multivalue_undefined_means_0=True,
                default_func=graph.make_multivalue_migrator(
                    dict(good='local_share_hash_rate',
                         dead='local_dead_share_hash_rate',
                         orphan='local_orphan_share_hash_rate'),
                    post_func=lambda bins: [
                        dict((k, (v[0] - (sum(
                            bin.get(rem_k, (0, 0))[0]
                            for rem_k in ['dead', 'orphan'])
                                          if k == 'good' else 0), v[1]))
                             for k, v in bin.iteritems()) for bin in bins
                    ])),
            'pool_rates':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True,
                                        multivalue_undefined_means_0=True),
            'current_payout':
            graph.DataStreamDescription(dataview_descriptions),
            'current_payouts':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True),
            'peers':
            graph.DataStreamDescription(
                dataview_descriptions,
                multivalues=True,
                default_func=graph.make_multivalue_migrator(
                    dict(incoming='incoming_peers',
                         outgoing='outgoing_peers'))),
            'miner_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'miner_dead_hash_rates':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'desired_version_rates':
            graph.DataStreamDescription(dataview_descriptions,
                                        multivalues=True,
                                        multivalue_undefined_means_0=True),
            'traffic_rate':
            graph.DataStreamDescription(
                dataview_descriptions, is_gauge=False, multivalues=True),
            'getwork_latency':
            graph.DataStreamDescription(dataview_descriptions),
            'memory_usage':
            graph.DataStreamDescription(dataview_descriptions),
        }, hd_obj)
    x = deferral.RobustLoopingCall(
        lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)

    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(
                    t, {user: work})

    @wb.share_received.watch
    def _(work, dead, share_hash):
        t = time.time()
        if not dead:
            hd.datastreams['local_share_hash_rates'].add_datum(
                t, dict(good=work))
        else:
            hd.datastreams['local_share_hash_rates'].add_datum(
                t, dict(dead=work))

        def later():
            res = node.tracker.is_child_of(share_hash,
                                           node.best_share_var.value)
            if res is None:
                res = False  # share isn't connected to sharechain? assume orphaned
            if res and dead:  # share was DOA, but is now in sharechain
                # move from dead to good
                hd.datastreams['local_share_hash_rates'].add_datum(
                    t, dict(dead=-work, good=work))
            elif not res and not dead:  # share wasn't DOA, and isn't in sharechain
                # move from good to orphan
                hd.datastreams['local_share_hash_rates'].add_datum(
                    t, dict(good=-work, orphan=work))

        reactor.callLater(200, later)

    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})

    def add_point():
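        # sampled every five seconds (see x.start(5) below): pool rates, payouts
        # per miner, peer counts, desired-version rates, and memory usage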
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH,
                         60 * 60 // node.net.SHARE_PERIOD,
                         node.tracker.get_height(node.best_share_var.value))
        t = time.time()

        pool_rates = p2pool_data.get_stale_counts(node.tracker,
                                                  node.best_share_var.value,
                                                  lookbehind,
                                                  rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)

        current_txouts = node.get_current_txouts()
        my_current_payouts = 0.0
        for add in wb.pubkeys.keys:
            my_current_payouts += current_txouts.get(
                bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8
        hd.datastreams['current_payout'].add_datum(t, my_current_payouts)
        miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates()
        current_txouts_by_address = dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), amount)
            for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(
            t,
            dict((user, current_txouts_by_address[user] * 1e-8)
                 for user in miner_hash_rates
                 if user in current_txouts_by_address))

        hd.datastreams['peers'].add_datum(
            t,
            dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues()
                             if not peer.incoming),
            ))

        vs = p2pool_data.get_desired_version_counts(node.tracker,
                                                    node.best_share_var.value,
                                                    lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_version_rates'].add_datum(
            t,
            dict((str(k), v / vs_total * pool_total)
                 for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()

    x = deferral.RobustLoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)

    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(),
                                                    new_work['latency'])

    new_root.putChild(
        'graph_data',
        WebInterface(lambda source, view: hd.datastreams[source].dataviews[
            view].get_data(time.time())))

    if static_dir is None:
        static_dir = os.path.join(
            os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static')
    web_root.putChild('static', static.File(static_dir))

    return web_root
Example #4
def main(args, net, datadir_path, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__, )
        print

        @defer.inlineCallbacks
        def connect_p2p():  # TODO: fix unknown message type: no type for 'getminings'
            # connect to dcrd over decred-p2p
            print '''Testing dcrd P2P connection to '%s:%s'...''' % (
                args.dcrd_address, args.dcrd_p2p_port)
            factory = decred_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.dcrd_address, args.dcrd_p2p_port, factory)

            def long():
                print '''    ...taking a while. Common reasons for this include all of dcrd's connection slots being used...'''

            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol()  # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print '    ...success!'
            print
            defer.returnValue(factory)

        if args.testnet:  # establish p2p connection first if testnet so dcrd can work without connections
            factory = yield connect_p2p()

        # connect to dcrd over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('http' if args.dcrd_no_rpc_ssl else 'https',
                               args.dcrd_address, args.dcrd_rpc_port)
        print '''Testing dcrd RPC connection to '%s' with username '%s'...''' % (
            url, args.dcrd_rpc_username)
        dcrd = jsonrpc.HTTPProxy(
            url,
            dict(Authorization='Basic ' +
                 base64.b64encode(args.dcrd_rpc_username + ':' +
                                  args.dcrd_rpc_password)),
            timeout=30)
        yield helper.check(dcrd, net)

        temp_work = yield helper.getwork(dcrd)

        dcrd_getinfo_var = variable.Variable(None)

        @defer.inlineCallbacks
        def poll_warnings():
            dcrd_getinfo_var.set(
                (yield deferral.retry('Error while calling getinfo:')(
                    dcrd.rpc_getinfo)()))

        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20 * 60)

        print '    ...success!'
        print '    Current block hash: {0:x}'.format(
            temp_work['previous_block'])
        print '    Current block height: {}'.format(temp_work['height'] - 1)
        print

        if not args.testnet:
            factory = yield connect_p2p()

        if args.pubkey_hash is None:
            #
            # Connect to locally running dcrwallet instance
            #
            # - TODO: find out if we use any other wallet APIs apart from the 'getaccountaddress' below.
            #   If not we can:
            #    - make this self contained then remove the connection
            #      OR
            #    - remove all wallet connection logic and demand an address on the commandline
            #
            # wallet separate
            # 9111/19111
            walleturl = '%s://%s:%i/' % ('http' if args.dcrd_no_rpc_ssl else
                                         'https', args.dcrd_address,
                                         args.dcrd_rpc_wallet_port)
            print '''Testing dcrwallet RPC connection to '%s' with username '%s'...''' % (
                walleturl, args.dcrd_rpc_username)
            dcrwallet = jsonrpc.HTTPProxy(
                walleturl,
                dict(Authorization='Basic ' +
                     base64.b64encode(args.dcrd_rpc_username + ':' +
                                      args.dcrd_rpc_password)),
                timeout=30)
            wallet_online = yield helper.checkwallet(dcrwallet, net)

        print 'Determining payout address...'
        pubkeys = keypool()
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')

            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print '    Loaded cached address: %s...' % (address, )
            else:
                address = None

            if address is None and wallet_online:
                print '    Getting payout address from local dcrwallet...not a great idea...'
                address = yield deferral.retry('Error getting payout address from local dcrwallet:', 5) \
                                                (lambda: dcrwallet.rpc_getaccountaddress('default'))()
                with open(address_path, 'wb') as f:
                    f.write(address)

            if address is None:
                raise Exception('Failed to get an address from local wallet')

            my_pubkey_hash = decred_addr.address_to_pubkey_hash(
                address, net.PARENT)

            pubkeys.addkey(my_pubkey_hash)

            print '    ...success! Payout address:', address, 'pubkey hash', my_pubkey_hash
            print

        else:
            my_pubkey_hash = args.pubkey_hash
            print '    ...success! Payout address:', decred_addr.pubkey_hash_to_address(
                my_pubkey_hash, net.PARENT)
            print
            pubkeys.addkey(my_pubkey_hash)

        print "Loading shares..."
        shares = {}
        known_verified = set()

        def share_cb(share):
            share.time_seen = 0  # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print "    %i" % (len(shares), )

        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net,
                                    share_cb, known_verified.add)
        print "    ...done loading %i shares (%i verified)!" % (
            len(shares), len(known_verified))
        print

        print 'Initializing work...'

        node = p2pool_node.Node(factory, dcrd, shares.values(), known_verified,
                                net)
        yield node.start()

        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(
            lambda share: ss.forget_verified_share(share.hash))

        def save_shares():
            for share in node.tracker.get_chain(
                    node.best_share_var.value,
                    min(node.tracker.get_height(node.best_share_var.value),
                        2 * net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)

        deferral.RobustLoopingCall(save_shares).start(60)

        print '    ...success!'
        print

        print 'Joining p2pool network using port %i...' % (args.p2pool_port, )

        @defer.inlineCallbacks
        def parse(host):
            port = net.P2P_PORT
            if ':' in host:
                host, port_str = host.split(':')
                port = int(port_str)
            defer.returnValue(((yield reactor.resolve(host)), port))
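        # Usage sketch (hypothetical peer): parse('203.0.113.5:9333') fires
        # with ('203.0.113.5', 9333); a bare 'host' resolves and falls back
        # to net.P2P_PORT. Note host.split(':') assumes at most one colon,
        # so bracketless IPv6 literals would not parse here.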

        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(
                        dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >> sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
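        # addrs maps (host, port) tuples to what appears to be a
        # (services, first_seen, last_seen) triple, judging by the
        # (0, time.time(), time.time()) default above; on disk it is a JSON
        # list of [[host, port], [services, first_seen, last_seen]] pairs.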

        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()

        node.p2p_node = p2pool_node.P2PNode(
            node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
            advertise_ip=args.advertise_ip,
            external_ip=args.p2pool_external_ip,
        )
        node.p2p_node.start()

        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))

        deferral.RobustLoopingCall(save_addrs).start(60)

        print '    ...success!'
        print

        if args.upnp:

            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(
                                lan_ip, args.p2pool_port, args.p2pool_port,
                                'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1 / 120))

            upnp_thread()

        # start listening for workers with a JSON-RPC server

        print 'Listening for workers on %r port %i...' % (worker_endpoint[0],
                                                          worker_endpoint[1])

        wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage,
                               args.worker_fee, args, pubkeys, dcrd)
        web_root = web.get_web_root(wb, datadir_path, dcrd_getinfo_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(
            web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)

        serverfactory = switchprotocol.FirstByteSwitchFactory(
            {'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
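        # FirstByteSwitchFactory multiplexes one listening port by peeking at
        # the first byte of each connection: stratum miners open with a JSON
        # object, so '{' selects the stratum factory, while anything else
        # (e.g. the 'G' of an HTTP GET) falls through to the web server.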
        deferral.retry('Error binding to worker port:', traceback=False)(
            reactor.listenTCP)(worker_endpoint[1],
                               serverfactory,
                               interface=worker_endpoint[0])

        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass

        print '    ...success!'
        print

        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (
            worker_endpoint[1], )
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (
                args.donation_percentage, )
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (
                args.donation_percentage, )
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (
                args.donation_percentage, )
            print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print

        if hasattr(signal, 'SIGALRM'):
            signal.signal(
                signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                    sys.stderr.write, 'Watchdog timer went off at:\n' +
                    ''.join(traceback.format_stack())))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)
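        # Watchdog sketch: the RobustLoopingCall re-arms a 30-second
        # signal.alarm() every second, so the alarm never fires while the
        # reactor is healthy; if the reactor stalls for 30 seconds the
        # pending SIGALRM is delivered and the handler dumps the stalled
        # stack to stderr.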


#         if args.irc_announce:
#             from twisted.words.protocols import irc
#             class IRCClient(irc.IRCClient):
#                 nickname = 'p2pool%02i' % (random.randrange(100),)
#                 channel = net.ANNOUNCE_CHANNEL
#                 def lineReceived(self, line):
#                     if p2pool.DEBUG:
#                         print repr(line)
#                     irc.IRCClient.lineReceived(self, line)
#                 def signedOn(self):
#                     self.in_channel = False
#                     irc.IRCClient.signedOn(self)
#                     self.factory.resetDelay()
#                     self.join(self.channel)
#                     @defer.inlineCallbacks
#                     def new_share(share):
#                         if not self.in_channel:
#                             return
#                         if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
#                             yield deferral.sleep(random.expovariate(1/60))
#                             message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), decred_addr.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
#                             if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
#                                 self.say(self.channel, message)
#                                 self._remember_message(message)
#                     self.watch_id = node.tracker.verified.added.watch(new_share)
#                     self.recent_messages = []
#                 def joined(self, channel):
#                     self.in_channel = True
#                 def left(self, channel):
#                     self.in_channel = False
#                 def _remember_message(self, message):
#                     self.recent_messages.append(message)
#                     while len(self.recent_messages) > 100:
#                         self.recent_messages.pop(0)
#                 def privmsg(self, user, channel, message):
#                     if channel == self.channel:
#                         self._remember_message(message)
#                 def connectionLost(self, reason):
#                     node.tracker.verified.added.unwatch(self.watch_id)
#                     print 'IRC connection lost:', reason.getErrorMessage()
#             class IRCClientFactory(protocol.ReconnectingClientFactory):
#                 protocol = IRCClient
#             reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))

        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues()
                            if peer.incoming),
                    ) + (' FDs: %i R/%i W' %
                         (len(reactor.getReaders()), len(reactor.getWriters()))
                         if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work'] / dt for datum in datums)
                    my_shares_per_s = sum(
                        datum['work'] / dt /
                        decred_data.target_to_average_attempts(
                            datum['share_target']) for datum in datums)
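                    # target_to_average_attempts(t) is presumably
                    # 2**256 // (t + 1): the expected hashes per share at
                    # target t, so each datum contributes
                    # (hashes/sec) / (hashes/share) = shares/sec.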
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(
                            sum(1 for datum in datums if datum['dead']),
                            len(datums), 0.95),
                        math.format_dt(1 / my_shares_per_s)
                        if my_shares_per_s else '???',
                    )

                    if height > 2:
                        (stale_orphan_shares,
                         stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(
                            node.tracker, node.best_share_var.value,
                            min(60 * 60 // net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(
                            node.tracker, node.best_share_var.value,
                            min(height - 1, 60 * 60 //
                                net.SHARE_PERIOD)) / (1 - stale_prop)

                        paystr = ''
                        paytot = 0.0
                        for i in range(len(pubkeys.keys)):
                            curtot = node.get_current_txouts().get(
                                decred_addr.pubkey_hash_to_script2(
                                    pubkeys.keys[i]), 0)
                            paytot += curtot * 1e-8
                            paystr += "(%.4f)" % (curtot * 1e-8, )
                        paystr += "=%.4f" % (paytot, )
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % (
                            shares,
                            stale_orphan_shares,
                            stale_doa_shares,
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95),
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95, lambda x: (1 - x) / (1 - stale_prop)),
                            paystr,
                            net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100 * stale_prop,
                            math.format_dt(
                                2**256 / node.dcrd_work.value['bits'].target /
                                real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(
                                node.tracker, node.best_share_var.value, net,
                                dcrd_getinfo_var.value, node.dcrd_work.value):
                            print >> sys.stderr, '#' * 40
                            print >> sys.stderr, '>>> Warning: ' + warning
                            print >> sys.stderr, '#' * 40

                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (
                                len(gc.garbage), map(type, gc.garbage))

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()

        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
Example #5
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)
    
    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)
    
    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: mogwai)',
        action='store', choices=sorted(realnets), default='mogwai', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from mogwaid>), or the literal string "dynamic" for dynamic address allocation',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('-i', '--numaddresses',
        help='number of mogwai auto-generated addresses to maintain for getwork dynamic address allocation',
        type=int, action='store', default=2, dest='numaddresses')
    parser.add_argument('-t', '--timeaddresses',
        help='seconds between acquiring a new address and removing a single old one (default: 2 days or 172800s)',
        type=int, action='store', default=172800, dest='timeaddresses')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--web-static',
        help='use an alternative web frontend in this directory (otherwise use the built-in frontend)',
        type=str, action='store', default=None, dest='web_static')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 1.0)',
        type=float, action='store', default=1.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')
    
    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
        help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
        type=str, action='store', default=None, dest='p2pool_external_ip')
    parser.add_argument('--disable-advertise',
        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
        action='store_false', default=True, dest='advertise_ip')
    
    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own mogwai address (by setting their miner's username to a mogwai address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    
    mogwaid_group = parser.add_argument_group('mogwaid interface')
    mogwaid_group.add_argument('--mogwaid-config-path', metavar='MOGWAID_CONFIG_PATH',
        help='custom configuration file path (when mogwaid -conf option used)',
        type=str, action='store', default=None, dest='mogwaid_config_path')
    mogwaid_group.add_argument('--mogwaid-address', metavar='MOGWAID_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='mogwaid_address')
    mogwaid_group.add_argument('--mogwaid-rpc-port', metavar='MOGWAID_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from mogwai.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='mogwaid_rpc_port')
    mogwaid_group.add_argument('--mogwaid-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='mogwaid_rpc_ssl')
    mogwaid_group.add_argument('--mogwaid-p2p-port', metavar='MOGWAID_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from mogwai.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='mogwaid_p2p_port')
    
    mogwaid_group.add_argument(metavar='MOGWAID_RPCUSERPASS',
        help='mogwaid RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from mogwai.conf)',
        type=str, action='store', default=[], nargs='*', dest='mogwaid_rpc_userpass')
    
    args = parser.parse_args()
    
    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    
    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]
    
    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)
    
    if len(args.mogwaid_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.mogwaid_rpc_username, args.mogwaid_rpc_password = ([None, None] + args.mogwaid_rpc_userpass)[-2:]
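    # ([None, None] + userpass)[-2:] maps [] -> (None, None),
    # ['pass'] -> (None, 'pass') and ['user', 'pass'] -> ('user', 'pass'),
    # so a single positional argument is treated as the password.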
    
    if args.mogwaid_rpc_password is None:
        conf_path = args.mogwaid_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''mogwai configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart mogwai.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'mogwaid_rpc_username', str),
            ('rpcpassword', 'mogwaid_rpc_password', str),
            ('rpcport', 'mogwaid_rpc_port', int),
            ('port', 'mogwaid_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if 'rpcssl' in contents and contents['rpcssl'] != '0':
            args.mogwaid_rpc_ssl = True
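        # Illustration (hypothetical file contents): a mogwai.conf with
        #   rpcuser=alice
        #   rpcpassword=secret
        #   rpcport=9332
        # fills in mogwaid_rpc_username/password/port above, but only for
        # values not already supplied on the command line.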
        if args.mogwaid_rpc_password is None:
            parser.error('''mogwai configuration file didn't contain an rpcpassword= line! Add one!''')
    
    if args.mogwaid_rpc_username is None:
        args.mogwaid_rpc_username = ''
    
    if args.mogwaid_rpc_port is None:
        args.mogwaid_rpc_port = net.PARENT.RPC_PORT
    
    if args.mogwaid_p2p_port is None:
        args.mogwaid_p2p_port = net.PARENT.P2P_PORT
    
    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT
    
    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')
    
    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)
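    # Examples (hypothetical): None -> ('', net.WORKER_PORT); '9332' ->
    # ('', 9332); '127.0.0.1:9332' -> ('127.0.0.1', 9332). rsplit(':', 1)
    # splits on the last colon, so the port always parses cleanly.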
    
    if args.address is not None and args.address != 'dynamic':
        try:
            args.pubkey_hash = mogwai_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception as e:
            parser.error('error parsing address: ' + repr(e))
    else:
        args.pubkey_hash = None
    
    def separate_url(url):
        s = urlparse.urlsplit(url)
        if '@' not in s.netloc:
            parser.error('merged url netloc must contain an "@"')
        userpass, new_netloc = s.netloc.rsplit('@', 1)
        return urlparse.urlunsplit(s._replace(netloc=new_netloc)), userpass
    merged_urls = map(separate_url, args.merged_urls)
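    # e.g. separate_url('http://ncuser:[email protected]:10332/') returns
    # ('http://127.0.0.1:10332/', 'ncuser:ncpass'): credentials are stripped
    # from the netloc and handed back separately.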
    
    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')
    
    logfile = logging.LogFile(args.logfile)
    pipe = logging.TimestampingPipe(logging.TeePipe([logging.EncodeReplacerPipe(sys.stderr), logfile]))
    sys.stdout = logging.AbortPipe(pipe)
    sys.stderr = log.DefaultObserver.stderr = logging.AbortPipe(logging.PrefixPipe(pipe, '> '))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    deferral.RobustLoopingCall(logfile.reopen).start(5)
    
    class ErrorReporter(object):
        def __init__(self):
            self.last_sent = None
        
        def emit(self, eventDict):
            if not eventDict["isError"]:
                return
            
            if self.last_sent is not None and time.time() < self.last_sent + 5:
                return
            self.last_sent = time.time()
            
            if 'failure' in eventDict:
                text = ((eventDict.get('why') or 'Unhandled Error')
                    + '\n' + eventDict['failure'].getTraceback())
            else:
                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
            
            from twisted.web import client
            client.getPage(
                url='http://u.forre.st/p2pool_error.cgi',
                method='POST',
                postdata=p2pool.__version__ + ' ' + net.NAME + '\n' + text,
                timeout=15,
            ).addBoth(lambda x: None)
    if not args.no_bugreport:
        log.addObserver(ErrorReporter().emit)
    
    reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
    reactor.run()
Example #6
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__, )
        print

        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (
                args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port,
                               factory)
            yield factory.getProtocol()  # waits until handshake is successful
            print '    ...success!'
            print
            defer.returnValue(factory)

        if args.testnet:  # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http',
                               args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (
            url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(
            url,
            dict(Authorization='Basic ' +
                 base64.b64encode(args.bitcoind_rpc_username + ':' +
                                  args.bitcoind_rpc_password)),
            timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)

        bitcoind_warning_var = variable.Variable(None)

        @defer.inlineCallbacks
        def poll_warnings():
            errors = (yield
                      deferral.retry('Error while calling getmininginfo:')(
                          bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None)

        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20 * 60)

        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'], )
        print '    Current block height: %i' % (temp_work['height'] - 1, )
        print

        if not args.testnet:
            factory = yield connect_p2p()

        print 'Determining payout address...'
        pubkey_path = os.path.join(datadir_path, 'cached_payout_pubkey')

        if os.path.exists(pubkey_path):
            with open(pubkey_path, 'rb') as f:
                pubkey = f.read().strip('\r\n')
            print '    Loaded cached pubkey, payout address: %s...' % (
                bitcoin_data.pubkey_to_address(pubkey.decode('hex'),
                                               net.PARENT), )
        else:
            pubkey = None

        if pubkey is not None:
            res = yield deferral.retry(
                'Error validating cached pubkey:',
                5)(lambda: bitcoind.rpc_validatepubkey(pubkey))()
            if not res['isvalid'] or not res['ismine']:
                print '    Cached pubkey is either invalid or not controlled by local bitcoind!'
                pubkey = None  # discard the bad cached pubkey; a fresh one is fetched below

        if pubkey is None:
            print '    Getting payout pubkey from bitcoind...'
            pubkey = yield deferral.retry(
                'Error getting payout pubkey from bitcoind:',
                5)(lambda: bitcoind.rpc_getnewpubkey('p2pool'))()

            with open(pubkey_path, 'wb') as f:
                f.write(pubkey)

        my_pubkey = pubkey.decode('hex')

        address = bitcoin_data.pubkey_to_address(my_pubkey, net.PARENT)

        my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(
            address, net.PARENT)
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(
            my_pubkey_hash, net.PARENT)
        print

        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        shares = {}
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                contents.time_seen = 0
                shares[contents.hash] = contents
                if len(shares) % 1000 == 0 and shares:
                    print "    %i" % (len(shares), )
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...done loading %i shares (%i verified)!" % (
            len(shares), len(known_verified))
        print

        print 'Initializing work...'

        node = p2pool_node.Node(factory, bitcoind, shares.values(),
                                known_verified, net)
        yield node.start()

        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        del shares, known_verified
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(
            lambda share: ss.forget_verified_share(share.hash))

        def save_shares():
            for share in node.tracker.get_chain(
                    node.best_share_var.value,
                    min(node.tracker.get_height(node.best_share_var.value),
                        2 * net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)

        deferral.RobustLoopingCall(save_shares).start(60)

        print '    ...success!'
        print

        print 'Joining p2pool network using port %i...' % (args.p2pool_port, )

        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))

        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(
                        dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >> sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()

        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()

        node.p2p_node = p2pool_node.P2PNode(
            node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
        )
        node.p2p_node.start()

        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))

        deferral.RobustLoopingCall(save_addrs).start(60)

        print '    ...success!'
        print

        if args.upnp:

            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(
                                lan_ip, args.p2pool_port, args.p2pool_port,
                                'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1 / 120))

            upnp_thread()

        # start listening for workers with a JSON-RPC server

        print 'Listening for workers on %r port %i...' % (worker_endpoint[0],
                                                          worker_endpoint[1])

        wb = work.WorkerBridge(node, my_pubkey, args.donation_percentage,
                               merged_urls, args.worker_fee)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(
            web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)

        serverfactory = switchprotocol.FirstByteSwitchFactory(
            {'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(
            reactor.listenTCP)(worker_endpoint[1],
                               serverfactory,
                               interface=worker_endpoint[0])

        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass

        print '    ...success!'
        print

        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (
            worker_endpoint[1], )
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (
                args.donation_percentage, )
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (
                args.donation_percentage, )
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (
                args.donation_percentage, )
            print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print

        if hasattr(signal, 'SIGALRM'):
            signal.signal(
                signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                    sys.stderr.write, 'Watchdog timer went off at:\n' +
                    ''.join(traceback.format_stack())))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)

        if args.irc_announce:
            from twisted.words.protocols import irc

            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100), )
                channel = net.ANNOUNCE_CHANNEL

                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)

                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)

                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header[
                                'bits'].target and abs(share.timestamp -
                                                       time.time()) < 10 * 60:
                            yield deferral.sleep(random.expovariate(1 / 60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (
                                net.NAME.upper(),
                                bitcoin_data.script2_to_address(
                                    share.new_script, net.PARENT),
                                net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
                                share.header_hash)
                            if all('%x' %
                                   (share.header_hash, ) not in old_message
                                   for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)

                    self.watch_id = node.tracker.verified.added.watch(
                        new_share)
                    self.recent_messages = []

                def joined(self, channel):
                    self.in_channel = True

                def left(self, channel):
                    self.in_channel = False

                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)

                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)

                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()

            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient

            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())

        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues()
                            if peer.incoming),
                    ) + (' FDs: %i R/%i W' %
                         (len(reactor.getReaders()), len(reactor.getWriters()))
                         if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work'] / dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(
                            sum(1 for datum in datums if datum['dead']),
                            len(datums), 0.95),
                        math.format_dt(2**256 / node.tracker.items[
                            node.best_share_var.value].max_target / my_att_s)
                        if my_att_s and node.best_share_var.value else '???',
                    )
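                    # 2**256 / max_target is (roughly) the expected number of
                    # hashes needed per share at the best share's target, so
                    # dividing by the local rate my_att_s gives the expected
                    # seconds per share.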

                    if height > 2:
                        (stale_orphan_shares,
                         stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(
                            node.tracker, node.best_share_var.value,
                            min(60 * 60 // net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(
                            node.tracker, node.best_share_var.value,
                            min(height - 1, 60 * 60 //
                                net.SHARE_PERIOD)) / (1 - stale_prop)

                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares,
                            stale_orphan_shares,
                            stale_doa_shares,
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95),
                            math.format_binomial_conf(
                                stale_orphan_shares + stale_doa_shares, shares,
                                0.95, lambda x: (1 - x) / (1 - stale_prop)),
                            node.get_current_txouts().get(
                                bitcoin_data.pubkey_to_script2(my_pubkey), 0) *
                            1e-6,
                            net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100 * stale_prop,
                            math.format_dt(
                                2**256 /
                                node.bitcoind_work.value['bits'].target /
                                real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(
                                node.tracker, node.best_share_var.value, net,
                                bitcoind_warning_var.value,
                                node.bitcoind_work.value):
                            print >> sys.stderr, '#' * 40
                            print >> sys.stderr, '>>> Warning: ' + warning
                            print >> sys.stderr, '#' * 40

                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (
                                len(gc.garbage), map(type, gc.garbage))

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()

        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')