def freshen_addresses(self, c):
    self.cur_address_throttle = time.time()
    if self.cur_address_throttle - self.address_throttle < 30:
        return
    self.address_throttle = time.time()
    print "ATTEMPTING TO FRESHEN ADDRESS."
    self.address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: self.bitcoind.rpc_getnewaddress('p2pool'))()
    new_pubkey, new_pubkey_version = bitcoin_data.address_to_pubkey_hash(self.address, self.net)
    self.pubkeys.popleft()
    self.pubkeys.addkey({'hash': new_pubkey, 'version': new_pubkey_version})
    print " Updated payout pool:"
    for i in range(len(self.pubkeys.keys)):
        print ' ...payout %d: %s(%f)' % (
            i,
            bitcoin_data.pubkey_hash_to_address(self.pubkeys.keys[i], self.net),
            self.pubkeys.keyweights[i],
        )
    self.pubkeys.updatestamp(c)
    print " Next address rotation in : %fs" % (time.time() - c + self.args.timeaddresses)
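# The address rotation above leans on deferral.retry to keep calling bitcoind until
# rpc_getnewaddress succeeds. A rough standalone sketch of that retry idea (synchronous
# and simplified -- not the actual deferral.retry implementation; the function and
# parameter names here are illustrative only):
import time
import traceback

def retry_sketch(message, sleep_seconds, func, *args, **kwargs):
    # keep calling func until it stops raising, logging each failure before sleeping
    while True:
        try:
            return func(*args, **kwargs)
        except Exception:
            print message
            traceback.print_exc()
            time.sleep(sleep_seconds)

# usage (hypothetical rpc proxy object):
# address = retry_sketch('Error getting a dynamic address from bitcoind:', 5,
#     bitcoind.rpc_getnewaddress, 'p2pool')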
def add_point():
    if node.tracker.get_height(node.best_share_var.value) < 10:
        return None
    lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
    t = time.time()

    pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
    pool_total = sum(pool_rates.itervalues())
    hd.datastreams['pool_rates'].add_datum(t, pool_rates)

    current_txouts = node.get_current_txouts()
    hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8)

    pkh_hash_rates = wb.get_local_addr_rates()
    addr_hash_rates = dict((bitcoin_data.pubkey_hash_to_address(pubkey_hash, node.net.PARENT), rate) for pubkey_hash, rate in pkh_hash_rates.iteritems())
    current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
    hd.datastreams['current_payouts'].add_datum(t, dict((address, current_txouts_by_address[address]*1e-8) for address in addr_hash_rates if address in current_txouts_by_address))

    hd.datastreams['peers'].add_datum(t, dict(
        incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
        outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
    ))

    vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
    vs_total = sum(vs.itervalues())
    hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
    try:
        hd.datastreams['memory_usage'].add_datum(t, memory.resident())
    except:
        if p2pool.DEBUG:
            traceback.print_exc()
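# add_point keys payouts by output script and converts them back to addresses for the
# graphs. For reference, the standard pay-to-pubkey-hash script that
# pubkey_hash_to_script2 produces is just the 20-byte HASH160 wrapped in the usual five
# opcodes. A minimal sketch, assuming pubkey_hash is a 160-bit integer as used
# throughout this file (illustrative only, not the p2pool implementation):
import binascii

def p2pkh_script_sketch(pubkey_hash):
    # OP_DUP OP_HASH160 <push 20 bytes> <hash160> OP_EQUALVERIFY OP_CHECKSIG
    hash_bytes = binascii.unhexlify('%040x' % pubkey_hash)
    return '\x76\xa9\x14' + hash_bytes + '\x88\xac'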
def freshen_addresses(self, c):
    self.cur_address_throttle = time.time()
    if self.cur_address_throttle - self.address_throttle < 30:
        return
    self.address_throttle = time.time()
    print "ATTEMPTING TO FRESHEN ADDRESS."
    self.address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: self.bitcoind.rpc_getnewaddress('p2pool'))()
    new_pubkey = bitcoin_data.address_to_pubkey_hash(self.address, self.net)
    self.pubkeys.popleft()
    self.pubkeys.addkey(new_pubkey)
    print " Updated payout pool:"
    for i in range(len(self.pubkeys.keys)):
        print ' ...payout %d: %s(%f)' % (i, bitcoin_data.pubkey_hash_to_address(self.pubkeys.keys[i], self.net), self.pubkeys.keyweights[i],)
    self.pubkeys.updatestamp(c)
    print " Next address rotation in : %fs" % (time.time() - c + self.args.timeaddresses)
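# Both versions of freshen_addresses round-trip between addresses and pubkey hashes.
# For context, that conversion is plain Base58Check: version byte + 20-byte hash +
# 4-byte double-SHA256 checksum, Base58-encoded. A compact sketch (address_version=0 is
# an assumed mainnet-style value; the real code takes it from the net definition):
import hashlib

B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def pubkey_hash_to_address_sketch(pubkey_hash, address_version=0):
    payload = chr(address_version) + ('%040x' % pubkey_hash).decode('hex')
    data = payload + hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    n = int(data.encode('hex'), 16)
    out = ''
    while n > 0:
        n, rem = divmod(n, 58)
        out = B58_ALPHABET[rem] + out
    # each leading zero byte is encoded as a literal '1'
    pad = len(data) - len(data.lstrip('\x00'))
    return '1' * pad + out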
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print

        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print ''' ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called:
                long_dc.cancel()
            print ' ...success!'
            print
            defer.returnValue(factory)

        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)

        bitcoind_getinfo_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)

        print ' ...success!'
        print ' Current block hash: %x' % (temp_work['previous_block'],)
        print ' Current block height: %i' % (temp_work['height'] - 1,)
        print

        if not args.testnet:
            factory = yield connect_p2p()

        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')

            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print ' Loaded cached address: %s...' % (address,)
            else:
                address = None

            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print ' Cached address is either invalid or not controlled by local bitcoind!'
                    address = None

            if address is None:
                print ' Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()

            with open(address_path, 'wb') as f:
                f.write(address)

            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash

        print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print

        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print " %i" % (len(shares),)
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print

        print 'Initializing work...'
node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net) yield node.start() for share_hash in shares: if share_hash not in node.tracker.items: ss.forget_share(share_hash) for share_hash in known_verified: if share_hash not in node.tracker.verified.items: ss.forget_verified_share(share_hash) node.tracker.removed.watch(lambda share: ss.forget_share(share.hash)) node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash)) def save_shares(): for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in node.tracker.verified.items: ss.add_verified_hash(share.hash) deferral.RobustLoopingCall(save_shares).start(60) print ' ...success!' print print 'Joining p2pool network using port %i...' % (args.p2pool_port,) @defer.inlineCallbacks def parse(host): port = net.P2P_PORT if ':' in host: host, port_str = host.split(':') port = int(port_str) defer.returnValue(((yield reactor.resolve(host)), port)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >>sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() node.p2p_node = p2pool_node.P2PNode(node, port=args.p2pool_port, max_incoming_conns=args.p2pool_conns, addr_store=addrs, connect_addrs=connect_addrs, desired_outgoing_conns=args.p2pool_outgoing_conns, advertise_ip=args.advertise_ip, ) node.p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(node.p2p_node.addr_store.items())) deferral.RobustLoopingCall(save_addrs).start(60) print ' ...success!' print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1/120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' 
% (worker_endpoint[0], worker_endpoint[1]) if args.address_share_rate is not None: share_rate_type = 'address' share_rate = args.address_share_rate else: share_rate_type = 'miner' share_rate = args.miner_share_rate wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee, args.min_difficulty, share_rate, share_rate_type) web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var) caching_wb = worker_interface.CachingWorkerBridge(wb) worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('static/')) web_serverfactory = server.Site(web_root) serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory) deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],) if args.donation_percentage > 1.1: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,) elif args.donation_percentage < .9: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack()) )) signal.siginterrupt(signal.SIGALRM, False) deferral.RobustLoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100),) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): self.in_channel = False irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if not self.in_channel: return if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60: yield deferral.sleep(random.expovariate(1/60)) message = '\x02%s BLOCK FOUND by %s! 
%s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = node.tracker.verified.added.watch(new_share) self.recent_messages = [] def joined(self, channel): self.in_channel = True def left(self, channel): self.in_channel = False def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): node.tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0)) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(3) try: height = node.tracker.get_height(node.best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(node.tracker.verified.items), len(node.tracker.items), len(node.p2p_node.peers), sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work']/dt for datum in datums) my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)), node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100*stale_prop, math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value): print >>sys.stderr, '#'*40 print >>sys.stderr, '>>> Warning: ' + warning print >>sys.stderr, '#'*40 if gc.garbage: print '%i pieces of uncollectable cyclic garbage! 
Types: %r' % (len(gc.garbage), map(type, gc.garbage)) if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
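# The status line above derives its time estimates directly from targets: the expected
# number of hash attempts needed to meet a target is roughly 2**256 / (target + 1), so
# dividing by the observed hash rate gives an expected time. A small sketch of that
# estimate (plain arithmetic, no p2pool internals assumed):
def expected_seconds_to_hit(target, hashes_per_second):
    # mean attempts for one success at this target, divided by the attempt rate
    average_attempts = 2**256 // (target + 1)
    return average_attempts / float(hashes_per_second)

# e.g. expected time to block vs. expected time to share for the same miner:
# expected_seconds_to_hit(block_target, my_rate) >> expected_seconds_to_hit(share_target, my_rate)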
def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event(), static_dir=None): node = wb.node start_time = time.time() web_root = resource.Resource() def get_users(): height, last = node.tracker.get_height_and_last( node.best_share_var.value) weights, total_weight, donation_weight = node.tracker.get_cumulative_weights( node.best_share_var.value, min(height, 720), 65535 * 2**256) res = {} for script in sorted(weights, key=lambda s: weights[s]): res[bitcoin_data.script2_to_address( script, node.net.PARENT)] = weights[script] / total_weight return res def get_current_scaled_txouts(scale, trunc=0): txouts = node.get_current_txouts() total = sum(txouts.itervalues()) results = dict((script, value * scale // total) for script, value in txouts.iteritems()) if trunc > 0: total_random = 0 random_set = set() for s in sorted(results, key=results.__getitem__): if results[s] >= trunc: break total_random += results[s] random_set.add(s) if total_random: winner = math.weighted_choice( (script, results[script]) for script in random_set) for script in random_set: del results[script] results[winner] = total_random if sum(results.itervalues()) < int(scale): results[math.weighted_choice( results.iteritems())] += int(scale) - sum(results.itervalues()) return results def get_patron_sendmany(total=None, trunc='0.01'): if total is None: return 'need total argument. go to patron_sendmany/<TOTAL>' total = int(float(total) * 1e8) trunc = int(float(trunc) * 1e8) return json.dumps( dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value / 1e8) for script, value in get_current_scaled_txouts( total, trunc).iteritems() if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None)) def get_global_stats(): # averaged over last hour if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600 // node.net.SHARE_PERIOD) nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second( node.tracker, node.best_share_var.value, lookbehind) stale_prop = p2pool_data.get_average_stale_prop( node.tracker, node.best_share_var.value, lookbehind) diff = bitcoin_data.target_to_difficulty( wb.current_work.value['bits'].target) return dict( pool_nonstale_hash_rate=nonstale_hash_rate, pool_hash_rate=nonstale_hash_rate / (1 - stale_prop), pool_stale_prop=stale_prop, min_difficulty=bitcoin_data.target_to_difficulty( node.tracker.items[node.best_share_var.value].max_target), network_block_difficulty=diff, network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD), ) def get_local_stats(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600 // node.net.SHARE_PERIOD) global_stale_prop = p2pool_data.get_average_stale_prop( node.tracker, node.best_share_var.value, lookbehind) my_unstale_count = sum(1 for share in node.tracker.get_chain( node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes) my_orphan_count = sum(1 for share in node.tracker.get_chain( node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan') my_doa_count = sum(1 for share in node.tracker.get_chain( node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa') my_share_count = my_unstale_count + my_orphan_count + my_doa_count my_stale_count = my_orphan_count + my_doa_count my_stale_prop = my_stale_count / 
my_share_count if my_share_count != 0 else None my_work = sum( bitcoin_data.target_to_average_attempts(share.target) for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1) if share.hash in wb.my_share_hashes) actual_time = ( node.tracker.items[node.best_share_var.value].timestamp - node.tracker.items[node.tracker.get_nth_parent_hash( node.best_share_var.value, lookbehind - 1)].timestamp) share_att_s = my_work / actual_time miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_last_difficulties = {} for addr in wb.last_work_shares.value: miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty( wb.last_work_shares.value[addr].target) return dict( my_hash_rates_in_last_hour=dict( note="DEPRECATED", nonstale=share_att_s, rewarded=share_att_s / (1 - global_stale_prop), actual=share_att_s / (1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway ), my_share_counts_in_last_hour=dict( shares=my_share_count, unstale_shares=my_unstale_count, stale_shares=my_stale_count, orphan_stale_shares=my_orphan_count, doa_stale_shares=my_doa_count, ), my_stale_proportions_in_last_hour=dict( stale=my_stale_prop, orphan_stale=my_orphan_count / my_share_count if my_share_count != 0 else None, dead_stale=my_doa_count / my_share_count if my_share_count != 0 else None, ), miner_hash_rates=miner_hash_rates, miner_dead_hash_rates=miner_dead_hash_rates, miner_last_difficulties=miner_last_difficulties, efficiency_if_miner_perfect=(1 - stale_orphan_shares / shares) / (1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection efficiency=(1 - (stale_orphan_shares + stale_doa_shares) / shares) / (1 - global_stale_prop) if shares else None, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), shares=dict( total=shares, orphan=stale_orphan_shares, dead=stale_doa_shares, ), uptime=time.time() - start_time, attempts_to_share=bitcoin_data.target_to_average_attempts( node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts( node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy'] * 1e-8, warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value), donation_proportion=wb.donation_percentage / 100, version=p2pool.__version__, protocol_version=p2p.Protocol.VERSION, fee=wb.worker_fee, ) class WebInterface(deferred_resource.DeferredResource): def __init__(self, func, mime_type='application/json', args=()): deferred_resource.DeferredResource.__init__(self) self.func, self.mime_type, self.args = func, mime_type, args def getChild(self, child, request): return WebInterface(self.func, self.mime_type, self.args + (child, )) @defer.inlineCallbacks def render_GET(self, request): request.setHeader('Content-Type', self.mime_type) request.setHeader('Access-Control-Allow-Origin', '*') res = yield self.func(*self.args) defer.returnValue( json.dumps(res) if self.mime_type == 'application/json' else res) def decent_height(): return min(node.tracker.get_height(node.best_share_var.value), 720) web_root.putChild( 'rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second( node.tracker, 
node.best_share_var.value, decent_height( )) / (1 - p2pool_data.get_average_stale_prop( node.tracker, node.best_share_var.value, decent_height())))) web_root.putChild( 'difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty( node.tracker.items[node.best_share_var.value].max_target))) web_root.putChild('users', WebInterface(get_users)) web_root.putChild( 'user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address( ph, node.net.PARENT ), prop) for ph, prop in p2pool_data.get_user_stale_props( node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems()))) web_root.putChild('fee', WebInterface(lambda: wb.worker_fee)) web_root.putChild( 'current_payouts', WebInterface(lambda: dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), value / 1e8) for script, value in node.get_current_txouts().iteritems()))) web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain')) web_root.putChild('global_stats', WebInterface(get_global_stats)) web_root.putChild('local_stats', WebInterface(get_local_stats)) web_root.putChild( 'peer_addresses', WebInterface(lambda: ' '.join('%s%s' % ( peer.transport.getPeer().host, ':' + str(peer.transport.getPeer( ).port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues()))) web_root.putChild( 'peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer( ).host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild( 'pings', WebInterface( defer.inlineCallbacks(lambda: defer.returnValue( dict([(a, (yield b)) for a, b in [( '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), defer.inlineCallbacks(lambda peer=peer: defer.returnValue( min([(yield peer.do_ping().addCallback( lambda x: x / 0.001).addErrback(lambda fail: None)) for i in xrange(3)])))()) for peer in list( node.p2p_node.peers.itervalues())]]))))) web_root.putChild( 'peer_versions', WebInterface(lambda: dict( ('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild( 'payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address( wb.my_pubkey_hash, node.net.PARENT))) web_root.putChild( 'payout_addrs', WebInterface(lambda: list(('%s' % bitcoin_data.pubkey_hash_to_address( add, node.net.PARENT)) for add in wb.pubkeys.keys))) web_root.putChild( 'recent_blocks', WebInterface(lambda: [ dict( ts=s.timestamp, hash='%064x' % s.header_hash, number=p2pool_data.parse_bip0034(s.share_data['coinbase'])[0], share='%064x' % s.hash, ) for s in node.tracker.get_chain( node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), node. 
net.CHAIN_LENGTH)) if s.pow_hash <= s.header['bits'].target ])) web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time)) web_root.putChild( 'stale_rates', WebInterface( lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True))) new_root = resource.Resource() web_root.putChild('web', new_root) stat_log = [] if os.path.exists(os.path.join(datadir_path, 'stats')): try: with open(os.path.join(datadir_path, 'stats'), 'rb') as f: stat_log = json.loads(f.read()) except: log.err(None, 'Error loading stats:') def update_stat_log(): while stat_log and stat_log[0]['time'] < time.time() - 24 * 60 * 60: stat_log.pop(0) lookbehind = 3600 // node.net.SHARE_PERIOD if node.tracker.get_height(node.best_share_var.value) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop( node.tracker, node.best_share_var.value, lookbehind) (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() my_current_payout = 0.0 for add in wb.pubkeys.keys: my_current_payout += node.get_current_txouts().get( bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8 stat_log.append( dict( time=time.time(), pool_hash_rate=p2pool_data.get_pool_attempts_per_second( node.tracker, node.best_share_var.value, lookbehind) / (1 - global_stale_prop), pool_stale_prop=global_stale_prop, local_hash_rates=miner_hash_rates, local_dead_hash_rates=miner_dead_hash_rates, shares=shares, stale_shares=stale_orphan_shares + stale_doa_shares, stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares), current_payout=my_current_payout, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), attempts_to_share=bitcoin_data.target_to_average_attempts( node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts( node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy'] * 1e-8, )) with open(os.path.join(datadir_path, 'stats'), 'wb') as f: f.write(json.dumps(stat_log)) x = deferral.RobustLoopingCall(update_stat_log) x.start(5 * 60) stop_event.watch(x.stop) new_root.putChild('log', WebInterface(lambda: stat_log)) def get_share(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return None share = node.tracker.items[int(share_hash_str, 16)] return dict( parent='%064x' % share.previous_hash, far_parent='%064x' % share.share_info['far_share_hash'], children=[ '%064x' % x for x in sorted( node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set()))) ], # sorted from most children to least children type_name=type(share).__name__, local=dict( verified=share.hash in node.tracker.verified.items, time_first_seen=start_time if share.time_seen == 0 else share.time_seen, peer_first_received_from=share.peer_addr, ), share_data=dict( timestamp=share.timestamp, target=share.target, max_target=share.max_target, payout_address=bitcoin_data.script2_to_address( share.new_script, node.net.PARENT), donation=share.share_data['donation'] / 65535, stale_info=share.share_data['stale_info'], nonce=share.share_data['nonce'], desired_version=share.share_data['desired_version'], absheight=share.absheight, abswork=share.abswork, ), block=dict( hash='%064x' % share.header_hash, header=dict( version=share.header['version'], 
previous_block='%064x' % share.header['previous_block'], merkle_root='%064x' % share.header['merkle_root'], timestamp=share.header['timestamp'], target=share.header['bits'].target, nonce=share.header['nonce'], ), gentx=dict( hash='%064x' % share.gentx_hash, coinbase=share.share_data['coinbase'].ljust( 2, '\x00').encode('hex'), value=share.share_data['subsidy'] * 1e-8, last_txout_nonce='%016x' % share.contents['last_txout_nonce'], ), other_transaction_hashes=[ '%064x' % x for x in share.get_other_tx_hashes(node.tracker) ], ), ) def get_share_address(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return None share = node.tracker.items[int(share_hash_str, 16)] return bitcoin_data.script2_to_address(share.new_script, node.net.PARENT) new_root.putChild( 'payout_address', WebInterface(lambda share_hash_str: get_share_address(share_hash_str))) new_root.putChild( 'share', WebInterface(lambda share_hash_str: get_share(share_hash_str))) new_root.putChild( 'heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads])) new_root.putChild( 'verified_heads', WebInterface( lambda: ['%064x' % x for x in node.tracker.verified.heads])) new_root.putChild( 'tails', WebInterface(lambda: [ '%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set()) ])) new_root.putChild( 'verified_tails', WebInterface(lambda: [ '%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set()) ])) new_root.putChild( 'best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value)) new_root.putChild( 'my_share_hashes', WebInterface( lambda: ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes])) new_root.putChild( 'my_share_hashes50', WebInterface(lambda: [ '%064x' % my_share_hash for my_share_hash in list(wb.my_share_hashes)[:50] ])) def get_share_data(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return '' share = node.tracker.items[int(share_hash_str, 16)] return p2pool_data.share_type.pack(share.as_share()) new_root.putChild( 'share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream')) new_root.putChild( 'currency_info', WebInterface(lambda: dict( symbol=node.net.PARENT.SYMBOL, block_explorer_url_prefix=node.net.PARENT. BLOCK_EXPLORER_URL_PREFIX, address_explorer_url_prefix=node.net.PARENT. 
ADDRESS_EXPLORER_URL_PREFIX, tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX, ))) new_root.putChild('version', WebInterface(lambda: p2pool.__version__)) hd_path = os.path.join(datadir_path, 'graph_db') hd_data = _atomic_read(hd_path) hd_obj = {} if hd_data is not None: try: hd_obj = json.loads(hd_data) except Exception: log.err(None, 'Error reading graph database:') dataview_descriptions = { 'last_hour': graph.DataViewDescription(150, 60 * 60), 'last_day': graph.DataViewDescription(300, 60 * 60 * 24), 'last_week': graph.DataViewDescription(300, 60 * 60 * 24 * 7), 'last_month': graph.DataViewDescription(300, 60 * 60 * 24 * 30), 'last_year': graph.DataViewDescription(300, 60 * 60 * 24 * 365.25), } hd = graph.HistoryDatabase.from_obj( { 'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_share_hash_rates': graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True, multivalue_undefined_means_0=True, default_func=graph.make_multivalue_migrator( dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'), post_func=lambda bins: [ dict((k, (v[0] - (sum( bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins ])), 'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'current_payout': graph.DataStreamDescription(dataview_descriptions), 'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True), 'peers': graph.DataStreamDescription( dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator( dict(incoming='incoming_peers', outgoing='outgoing_peers'))), 'miner_hash_rates': graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True), 'miner_dead_hash_rates': graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True), 'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'traffic_rate': graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True), 'getwork_latency': graph.DataStreamDescription(dataview_descriptions), 'memory_usage': graph.DataStreamDescription(dataview_descriptions), }, hd_obj) x = deferral.RobustLoopingCall( lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))) x.start(100) stop_event.watch(x.stop) @wb.pseudoshare_received.watch def _(work, dead, user): t = time.time() hd.datastreams['local_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_hash_rate'].add_datum(t, work) if user is not None: hd.datastreams['miner_hash_rates'].add_datum(t, {user: work}) if dead: hd.datastreams['miner_dead_hash_rates'].add_datum( t, {user: work}) @wb.share_received.watch def _(work, dead, share_hash): t = time.time() if not dead: hd.datastreams['local_share_hash_rates'].add_datum( t, dict(good=work)) else: hd.datastreams['local_share_hash_rates'].add_datum( t, dict(dead=work)) def later(): res = node.tracker.is_child_of(share_hash, node.best_share_var.value) if res is None: res = False # share isn't connected to sharechain? 
assume orphaned if res and dead: # share was DOA, but is now in sharechain # move from dead to good hd.datastreams['local_share_hash_rates'].add_datum( t, dict(dead=-work, good=work)) elif not res and not dead: # share wasn't DOA, and isn't in sharechain # move from good to orphan hd.datastreams['local_share_hash_rates'].add_datum( t, dict(good=-work, orphan=work)) reactor.callLater(200, later) @node.p2p_node.traffic_happened.watch def _(name, bytes): hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes}) def add_point(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.net.CHAIN_LENGTH, 60 * 60 // node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value)) t = time.time() pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True) pool_total = sum(pool_rates.itervalues()) hd.datastreams['pool_rates'].add_datum(t, pool_rates) current_txouts = node.get_current_txouts() my_current_payouts = 0.0 for add in wb.pubkeys.keys: my_current_payouts += current_txouts.get( bitcoin_data.pubkey_hash_to_script2(add), 0) * 1e-8 hd.datastreams['current_payout'].add_datum(t, my_current_payouts) miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() current_txouts_by_address = dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems()) hd.datastreams['current_payouts'].add_datum( t, dict((user, current_txouts_by_address[user] * 1e-8) for user in miner_hash_rates if user in current_txouts_by_address)) hd.datastreams['peers'].add_datum( t, dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), )) vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind) vs_total = sum(vs.itervalues()) hd.datastreams['desired_version_rates'].add_datum( t, dict((str(k), v / vs_total * pool_total) for k, v in vs.iteritems())) try: hd.datastreams['memory_usage'].add_datum(t, memory.resident()) except: if p2pool.DEBUG: traceback.print_exc() x = deferral.RobustLoopingCall(add_point) x.start(5) stop_event.watch(x.stop) @node.bitcoind_work.changed.watch def _(new_work): hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency']) new_root.putChild( 'graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[ view].get_data(time.time()))) if static_dir is None: static_dir = os.path.join( os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static') web_root.putChild('static', static.File(static_dir)) return web_root
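# The recent_blocks endpoint above pulls the block height out of the coinbase with
# parse_bip0034. BIP 34 requires the coinbase scriptSig to begin with a push of the
# serialized height, so a minimal decoder only needs that first push: one length byte
# followed by that many little-endian bytes. A sketch of the common case (not the real
# parse_bip0034 from p2pool.data, and ignoring the short-height opcode forms):
def bip34_height_sketch(coinbase):
    push_len = ord(coinbase[0])
    height_bytes = coinbase[1:1 + push_len]
    return sum(ord(b) << (8 * i) for i, b in enumerate(height_bytes))

# e.g. a coinbase starting with '\x03\x8b\x5e\x0c' decodes to height 0x0c5e8b == 810635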
def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target): if self.node.best_share_var.value is None and self.node.net.PERSIST: raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares') if self.merged_work.value: tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value) mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)] mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict( merkle_root=bitcoin_data.merkle_hash(mm_hashes), size=size, nonce=0, )) mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()] else: mm_data = '' mm_later = [] tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']] tx_map = dict(zip(tx_hashes, self.current_work.value['transactions'])) previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None if previous_share is None: share_type = p2pool_data.Share else: previous_share_type = type(previous_share) if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH: share_type = previous_share_type else: successor_type = previous_share_type.SUCCESSOR counts = p2pool_data.get_desired_version_counts(self.node.tracker, self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10) upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues()) if upgraded > .65: print 'Switchover imminent. Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95) print # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100: share_type = successor_type else: share_type = previous_share_type if desired_share_target is None: desired_share_target = 2**256-1 local_addr_rates = self.get_local_addr_rates() local_hash_rate = local_addr_rates.get(pubkey_hash, 0) if local_hash_rate > 0.0: desired_share_target = min(desired_share_target, bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty lookbehind = 3600//self.node.net.SHARE_PERIOD block_subsidy = self.node.bitcoind_work.value['subsidy'] if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind: expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \ * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD: desired_share_target = min(desired_share_target, bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy) ) if True: share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction( tracker=self.node.tracker, share_data=dict( previous_share_hash=self.node.best_share_var.value, coinbase=(script.create_push_script([ self.current_work.value['height'], ] + ([mm_data] if mm_data else []) + [ ]) + self.current_work.value['coinbaseflags'])[:100], 
nonce=random.randrange(2**32), pubkey_hash=pubkey_hash, subsidy=self.current_work.value['subsidy'], donation=math.perfect_round(65535*self.donation_percentage/100), stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain): 'orphan' if orphans > orphans_recorded_in_chain else 'doa' if doas > doas_recorded_in_chain else None )(*self.get_stale_counts()), desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION, ), block_target=self.current_work.value['bits'].target, desired_timestamp=int(time.time() + 0.5), desired_target=desired_share_target, ref_merkle_link=dict(branch=[], index=0), desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']), net=self.node.net, known_txs=tx_map, #base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']), base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['bits'].bits, self.current_work.value['height']), ) packed_gentx = bitcoin_data.tx_type.pack(gentx) other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes] mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later] if desired_pseudoshare_target is None: target = 2**256-1 local_hash_rate = self._estimate_local_hash_rate() if local_hash_rate is not None: target = min(target, bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every second by modulating pseudoshare difficulty else: target = desired_pseudoshare_target target = max(target, share_info['bits'].target) for aux_work, index, hashes in mm_later: target = max(target, aux_work['target']) target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE) getwork_time = time.time() lp_count = self.new_work_event.times merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0) print 'New work for worker %s! 
Difficulty: %.06f Share difficulty: %.06f (speed %.06f) Total block value: %.6f %s including %i transactions' % ( bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT), bitcoin_data.target_to_difficulty(target), bitcoin_data.target_to_difficulty(share_info['bits'].target), self.get_local_addr_rates().get(pubkey_hash, 0), self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL, len(self.current_work.value['transactions']), ) #need this for stats self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT)]=share_info['bits'] ba = dict( version=min(self.current_work.value['version'], 2), previous_block=self.current_work.value['previous_block'], merkle_link=merkle_link, coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4], coinb2=packed_gentx[-4:], timestamp=self.current_work.value['time'], bits=self.current_work.value['bits'], share_target=target, ) received_header_hashes = set() def got_response(header, user, coinbase_nonce): assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx #header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) header_hash = self.node.net.PARENT.BLOCKHASH_FUNC(bitcoin_data.block_header_type.pack(header)) pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) try: if pow_hash <= header['bits'].target or p2pool.DEBUG: helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net) if pow_hash <= header['bits'].target: print print 'GOT BLOCK FROM MINER! Passing to bitcoind! %s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash) print except: log.err(None, 'Error while processing potential block:') user, _, _, _ = self.get_user_details(user) assert header['previous_block'] == ba['previous_block'] assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link) assert header['bits'] == ba['bits'] on_time = self.new_work_event.times == lp_count for aux_work, index, hashes in mm_later: try: if pow_hash <= aux_work['target'] or p2pool.DEBUG: df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)( pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'), bitcoin_data.aux_pow_type.pack(dict( merkle_tx=dict( tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link, ), merkle_link=bitcoin_data.calculate_merkle_link(hashes, index), parent_block_header=header, )).encode('hex'), ) @df.addCallback def _(result, aux_work=aux_work): if result != (pow_hash <= aux_work['target']): print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target']) else: print 'Merged block submittal result: %s' % (result,) @df.addErrback def _(err): log.err(err, 'Error submitting merged block:') except: log.err(None, 'Error while processing merged mining POW:') if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes: last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce) share = get_share(header, last_txout_nonce) print 'GOT SHARE! 
%s %s prev %s age %.2fs%s' % ( user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time, ' DEAD ON ARRIVAL' if not on_time else '', ) self.my_share_hashes.add(share.hash) if not on_time: self.my_doa_share_hashes.add(share.hash) self.node.tracker.add(share) self.node.set_best_share() try: if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None: self.node.p2p_node.broadcast_share(share.hash) except: log.err(None, 'Error forwarding block solution:') self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash) if pow_hash > target: print 'Worker %s submitted share with hash > target:' % (user,) print ' Hash: %56x' % (pow_hash,) print ' Target: %56x' % (target,) elif header_hash in received_header_hashes: print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,) else: received_header_hashes.add(header_hash) self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user) self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target))) while len(self.recent_shares_ts_work) > 50: self.recent_shares_ts_work.pop(0) self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target)) self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash)) return on_time return ba, got_response
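# got_response feeds each accepted pseudoshare into recent_shares_ts_work as a
# (timestamp, average_attempts) pair, and the local hash-rate figures are essentially
# total recorded work divided by the span of time it covers. A self-contained sketch of
# that estimate (the tuple layout mirrors recent_shares_ts_work; the helper names here
# are illustrative, not the p2pool functions):
def target_to_average_attempts_sketch(target):
    return 2**256 // (target + 1)

def estimate_hash_rate(shares_ts_work):
    # shares_ts_work: list of (unix_timestamp, attempts) for recent accepted pseudoshares
    if len(shares_ts_work) < 2:
        return None
    dt = shares_ts_work[-1][0] - shares_ts_work[0][0]
    if dt <= 0:
        return None
    # skip the first datum: its work was accumulated before the measured window opened
    return sum(work for ts, work in shares_ts_work[1:]) / dt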
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__,) print @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) yield factory.getProtocol() # waits until handshake is successful print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username) bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) yield helper.check(bitcoind, net) temp_work = yield helper.getwork(bitcoind) bitcoind_warning_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors'] bitcoind_warning_var.set(errors if errors != '' else None) yield poll_warnings() task.LoopingCall(poll_warnings).start(20*60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'],) print ' Current block height: %i' % (temp_work['height'] - 1,) print if not args.testnet: factory = yield connect_p2p() print 'Determining payout address...' pubkey_path = os.path.join(datadir_path, 'cached_payout_pubkey') if os.path.exists(pubkey_path): with open(pubkey_path, 'rb') as f: pubkey = f.read().strip('\r\n') print ' Loaded cached pubkey, payout address: %s...' % (bitcoin_data.pubkey_to_address(pubkey.decode('hex'), net.PARENT),) else: pubkey = None if pubkey is not None: res = yield deferral.retry('Error validating cached pubkey:', 5)(lambda: bitcoind.rpc_validatepubkey(pubkey))() if not res['isvalid'] or not res['ismine']: print ' Cached pubkey is either invalid or not controlled by local bitcoind!' address = None if pubkey is None: print ' Getting payout pubkey from bitcoind...' pubkey = yield deferral.retry('Error getting payout pubkey from bitcoind:', 5)(lambda: bitcoind.rpc_getnewpubkey('p2pool'))() with open(pubkey_path, 'wb') as f: f.write(pubkey) my_pubkey = pubkey.decode('hex') address = bitcoin_data.pubkey_to_address(my_pubkey, net.PARENT) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT) print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT) print ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net) shares = {} known_verified = set() print "Loading shares..." for i, (mode, contents) in enumerate(ss.get_shares()): if mode == 'share': contents.time_seen = 0 shares[contents.hash] = contents if len(shares) % 1000 == 0 and shares: print " %i" % (len(shares),) elif mode == 'verified_hash': known_verified.add(contents) else: raise AssertionError() print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified)) print print 'Initializing work...' 
node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net) yield node.start() for share_hash in shares: if share_hash not in node.tracker.items: ss.forget_share(share_hash) for share_hash in known_verified: if share_hash not in node.tracker.verified.items: ss.forget_verified_share(share_hash) del shares, known_verified node.tracker.removed.watch(lambda share: ss.forget_share(share.hash)) node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash)) def save_shares(): for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in node.tracker.verified.items: ss.add_verified_hash(share.hash) task.LoopingCall(save_shares).start(60) print ' ...success!' print print 'Joining p2pool network using port %i...' % (args.p2pool_port,) @defer.inlineCallbacks def parse(x): if ':' in x: ip, port = x.split(':') defer.returnValue(((yield reactor.resolve(ip)), int(port))) else: defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >>sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() node.p2p_node = p2pool_node.P2PNode(node, port=args.p2pool_port, max_incoming_conns=args.p2pool_conns, addr_store=addrs, connect_addrs=connect_addrs, desired_outgoing_conns=args.p2pool_outgoing_conns, ) node.p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(node.p2p_node.addr_store.items())) task.LoopingCall(save_addrs).start(60) print ' ...success!' print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1/120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1]) wb = work.WorkerBridge(node, my_pubkey, args.donation_percentage, merged_urls, args.worker_fee) web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var) caching_wb = worker_interface.CachingWorkerBridge(wb) worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/')) web_serverfactory = server.Site(web_root) serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory) deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' 
% (worker_endpoint[1],) if args.donation_percentage > 1.1: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,) elif args.donation_percentage < .9: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack()) )) signal.siginterrupt(signal.SIGALRM, False) task.LoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100),) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): self.in_channel = False irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if not self.in_channel: return if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60: yield deferral.sleep(random.expovariate(1/60)) message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = node.tracker.verified.added.watch(new_share) self.recent_messages = [] def joined(self, channel): self.in_channel = True def left(self, channel): self.in_channel = False def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): node.tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory()) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(3) try: height = node.tracker.get_height(node.best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(node.tracker.verified.items), len(node.tracker.items), len(node.p2p_node.peers), sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work']/dt for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(2**256 / node.tracker.items[node.best_share_var.value].max_target / my_att_s) if my_att_s and 
node.best_share_var.value else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)), node.get_current_txouts().get(bitcoin_data.pubkey_to_script2(my_pubkey), 0) * 1e-6, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100*stale_prop, math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value): print >>sys.stderr, '#'*40 print >>sys.stderr, '>>> Warning: ' + warning print >>sys.stderr, '#'*40 if gc.garbage: print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage)) if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
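# A minimal sketch, with assumed numbers, of the arithmetic the status thread
# above prints as "Expected time to share" / "Expected time to block": the
# average number of hash attempts needed to beat a target is roughly
# 2**256 / target, so dividing by the observed attempt rate gives seconds.
from __future__ import division

def expected_seconds(target, attempts_per_second):
    # average attempts for one success at this target, spread over our rate
    if not attempts_per_second:
        return float('inf')
    return 2**256 / target / attempts_per_second

# example (hypothetical values): a share target worth ~5e8 attempts at 5 MH/s
# comes out to about 100 seconds
example_target = 2**256 // 500000000
print 'expected time to share: %.1fs' % expected_seconds(example_target, 5e6)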
def get_web_root(wb, datadir_path, bitcoind_warning_var): node = wb.node start_time = time.time() web_root = resource.Resource() def get_users(): height, last = node.tracker.get_height_and_last(node.best_share_var.value) weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256) res = {} for script in sorted(weights, key=lambda s: weights[s]): res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight return res def get_current_scaled_txouts(scale, trunc=0): txouts = node.get_current_txouts() total = sum(txouts.itervalues()) results = dict((script, value*scale//total) for script, value in txouts.iteritems()) if trunc > 0: total_random = 0 random_set = set() for s in sorted(results, key=results.__getitem__): if results[s] >= trunc: break total_random += results[s] random_set.add(s) if total_random: winner = math.weighted_choice((script, results[script]) for script in random_set) for script in random_set: del results[script] results[winner] = total_random if sum(results.itervalues()) < int(scale): results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues()) return results def get_patron_sendmany(total=None, trunc='0.01'): if total is None: return 'need total argument. go to patron_sendmany/<TOTAL>' total = int(float(total)*1e8) trunc = int(float(trunc)*1e8) return json.dumps(dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in get_current_scaled_txouts(total, trunc).iteritems() if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None )) def get_local_rates(): miner_hash_rates = {} miner_dead_hash_rates = {} datums, dt = wb.local_rate_monitor.get_datums_in_last() for datum in datums: miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt if datum['dead']: miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt return miner_hash_rates, miner_dead_hash_rates def get_global_stats(): # averaged over last hour if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD) nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind) stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) return dict( pool_nonstale_hash_rate=nonstale_hash_rate, pool_hash_rate=nonstale_hash_rate/(1 - stale_prop), pool_stale_prop=stale_prop, min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target), ) def get_local_stats(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD) global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes) my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan') my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa') 
my_share_count = my_unstale_count + my_orphan_count + my_doa_count my_stale_count = my_orphan_count + my_doa_count my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None my_work = sum(bitcoin_data.target_to_average_attempts(share.target) for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1) if share.hash in wb.my_share_hashes) actual_time = (node.tracker.items[node.best_share_var.value].timestamp - node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp) share_att_s = my_work / actual_time miner_hash_rates, miner_dead_hash_rates = get_local_rates() (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() return dict( my_hash_rates_in_last_hour=dict( note="DEPRECATED", nonstale=share_att_s, rewarded=share_att_s/(1 - global_stale_prop), actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway ), my_share_counts_in_last_hour=dict( shares=my_share_count, unstale_shares=my_unstale_count, stale_shares=my_stale_count, orphan_stale_shares=my_orphan_count, doa_stale_shares=my_doa_count, ), my_stale_proportions_in_last_hour=dict( stale=my_stale_prop, orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None, dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None, ), miner_hash_rates=miner_hash_rates, miner_dead_hash_rates=miner_dead_hash_rates, efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), shares=dict( total=shares, orphan=stale_orphan_shares, dead=stale_doa_shares, ), uptime=time.time() - start_time, attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy']*1e-8, warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value), donation_proportion=wb.donation_percentage/100, ) class WebInterface(deferred_resource.DeferredResource): def __init__(self, func, mime_type='application/json', args=()): deferred_resource.DeferredResource.__init__(self) self.func, self.mime_type, self.args = func, mime_type, args def getChild(self, child, request): return WebInterface(self.func, self.mime_type, self.args + (child,)) @defer.inlineCallbacks def render_GET(self, request): request.setHeader('Content-Type', self.mime_type) request.setHeader('Access-Control-Allow-Origin', '*') res = yield self.func(*self.args) defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res) web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, 720)/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, 720)))) web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target))) web_root.putChild('users', 
WebInterface(get_users)) web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems()))) web_root.putChild('fee', WebInterface(lambda: wb.worker_fee)) web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems()))) web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain')) web_root.putChild('global_stats', WebInterface(get_global_stats)) web_root.putChild('local_stats', WebInterface(get_local_stats)) web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in node.p2p_node.peers.itervalues()])) web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue( dict([(a, (yield b)) for a, b in [( '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), defer.inlineCallbacks(lambda peer=peer: defer.returnValue( min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)]) ))() ) for peer in list(node.p2p_node.peers.itervalues())] ]) )))) web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT))) web_root.putChild('recent_blocks', WebInterface(lambda: [dict( ts=s.timestamp, hash='%064x' % s.header_hash, number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]), share='%064x' % s.hash, ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target])) web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time)) web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, 720, rates=True))) new_root = resource.Resource() web_root.putChild('web', new_root) stat_log = [] if os.path.exists(os.path.join(datadir_path, 'stats')): try: with open(os.path.join(datadir_path, 'stats'), 'rb') as f: stat_log = json.loads(f.read()) except: log.err(None, 'Error loading stats:') def update_stat_log(): while stat_log and stat_log[0]['time'] < time.time() - 24*60*60: stat_log.pop(0) lookbehind = 3600//node.net.SHARE_PERIOD if node.tracker.get_height(node.best_share_var.value) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_hash_rates, miner_dead_hash_rates = get_local_rates() stat_log.append(dict( time=time.time(), pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop), pool_stale_prop=global_stale_prop, local_hash_rates=miner_hash_rates, local_dead_hash_rates=miner_dead_hash_rates, shares=shares, 
stale_shares=stale_orphan_shares + stale_doa_shares, stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares), current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy']*1e-8, )) with open(os.path.join(datadir_path, 'stats'), 'wb') as f: f.write(json.dumps(stat_log)) task.LoopingCall(update_stat_log).start(5*60) new_root.putChild('log', WebInterface(lambda: stat_log)) def get_share(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return None share = node.tracker.items[int(share_hash_str, 16)] return dict( parent='%064x' % share.previous_hash, children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children local=dict( verified=share.hash in node.tracker.verified.items, time_first_seen=start_time if share.time_seen == 0 else share.time_seen, peer_first_received_from=share.peer.addr if share.peer is not None else None, ), share_data=dict( timestamp=share.timestamp, target=share.target, max_target=share.max_target, payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT), donation=share.share_data['donation']/65535, stale_info=share.share_data['stale_info'], nonce=share.share_data['nonce'], desired_version=share.share_data['desired_version'], ), block=dict( hash='%064x' % share.header_hash, header=dict( version=share.header['version'], previous_block='%064x' % share.header['previous_block'], merkle_root='%064x' % share.header['merkle_root'], timestamp=share.header['timestamp'], target=share.header['bits'].target, nonce=share.header['nonce'], ), gentx=dict( hash='%064x' % share.gentx_hash, coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'), value=share.share_data['subsidy']*1e-8, ), txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])], ), ) new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str))) new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads])) new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads])) new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())])) new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())])) new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value)) def get_share_data(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return '' share = node.tracker.items[int(share_hash_str, 16)] return p2pool_data.share_type.pack(share.as_share1a()) new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 
'application/octet-stream')) new_root.putChild('currency_info', WebInterface(lambda: dict( symbol=node.net.PARENT.SYMBOL, block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX, ))) new_root.putChild('version', WebInterface(lambda: p2pool.__version__)) hd_path = os.path.join(datadir_path, 'graph_db') hd_data = _atomic_read(hd_path) hd_obj = {} if hd_data is not None: try: hd_obj = json.loads(hd_data) except Exception: log.err(None, 'Error reading graph database:') dataview_descriptions = { 'last_hour': graph.DataViewDescription(150, 60*60), 'last_day': graph.DataViewDescription(300, 60*60*24), 'last_week': graph.DataViewDescription(300, 60*60*24*7), 'last_month': graph.DataViewDescription(300, 60*60*24*30), 'last_year': graph.DataViewDescription(300, 60*60*24*365.25), } def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj): if not obj: last_bin_end = 0 bins = dv_desc.bin_count*[{}] else: pool_rates = obj['pool_rates'][dv_name] desired_versions = obj['desired_versions'][dv_name] def get_total_pool_rate(t): n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width) if n < 0 or n >= dv_desc.bin_count: return None total = sum(x[0] for x in pool_rates['bins'][n].values()) count = math.mean(x[1] for x in pool_rates['bins'][n].values()) if count == 0: return None return total/count last_bin_end = desired_versions['last_bin_end'] bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)] return graph.DataView(dv_desc, ds_desc, last_bin_end, bins) hd = graph.HistoryDatabase.from_obj({ 'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'current_payout': graph.DataStreamDescription(dataview_descriptions), 'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True), 'incoming_peers': graph.DataStreamDescription(dataview_descriptions), 'outgoing_peers': graph.DataStreamDescription(dataview_descriptions), 'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True, default_func=build_desired_rates), 'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'getwork_latency': graph.DataStreamDescription(dataview_descriptions), }, hd_obj) task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100) @wb.pseudoshare_received.watch def _(work, dead, user): t = time.time() hd.datastreams['local_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_hash_rate'].add_datum(t, work) if user is not None: 
hd.datastreams['miner_hash_rates'].add_datum(t, {user: work}) if dead: hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work}) @wb.share_received.watch def _(work, dead): t = time.time() hd.datastreams['local_share_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work) @node.p2p_node.traffic_happened.watch def _(name, bytes): hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes}) def add_point(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value)) t = time.time() pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True) pool_total = sum(pool_rates.itervalues()) hd.datastreams['pool_rates'].add_datum(t, pool_rates) current_txouts = node.get_current_txouts() hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8) miner_hash_rates, miner_dead_hash_rates = get_local_rates() current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems()) hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address)) hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming)) hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming)) vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind) vs_total = sum(vs.itervalues()) hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems())) hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems())) task.LoopingCall(add_point).start(5) @node.bitcoind_work.changed.watch def _(new_work): hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency']) new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time()))) web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static'))) return web_root
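# A minimal worked example, using assumed counts rather than live node data,
# of the efficiency figures returned by get_local_stats() above: the local
# non-stale fraction of shares is compared against the pool-wide non-stale
# fraction.
from __future__ import division

def efficiency(orphan, doa, shares, global_stale_prop):
    if not shares:
        return None
    return (1 - (orphan + doa)/shares) / (1 - global_stale_prop)

# hypothetical hour: 2 orphan + 1 dead out of 100 local shares, 4% pool stale rate
print 'efficiency: %.3f' % efficiency(2, 1, 100, 0.04)   # ~1.010, slightly above average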
def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event()): node = wb.node start_time = time.time() web_root = resource.Resource() def get_users(): height, last = node.tracker.get_height_and_last(node.best_share_var.value) weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256) res = {} for script in sorted(weights, key=lambda s: weights[s]): res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight return res def get_current_scaled_txouts(scale, trunc=0): txouts = node.get_current_txouts() total = sum(txouts.itervalues()) results = dict((script, value*scale//total) for script, value in txouts.iteritems()) if trunc > 0: total_random = 0 random_set = set() for s in sorted(results, key=results.__getitem__): if results[s] >= trunc: break total_random += results[s] random_set.add(s) if total_random: winner = math.weighted_choice((script, results[script]) for script in random_set) for script in random_set: del results[script] results[winner] = total_random if sum(results.itervalues()) < int(scale): results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues()) return results def get_patron_sendmany(total=None, trunc='0.01'): if total is None: return 'need total argument. go to patron_sendmany/<TOTAL>' total = int(float(total)*1e8) trunc = int(float(trunc)*1e8) return json.dumps(dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in get_current_scaled_txouts(total, trunc).iteritems() if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None )) def get_global_stats(): # averaged over last hour if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD) nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind) stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) diff = bitcoin_data.target_to_difficulty(wb.current_work.value['bits'].target) return dict( pool_nonstale_hash_rate=nonstale_hash_rate, pool_hash_rate=nonstale_hash_rate/(1 - stale_prop), pool_stale_prop=stale_prop, min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target), network_block_difficulty=diff, network_hashrate=(diff * 2**32 // node.net.PARENT.BLOCK_PERIOD), ) def get_local_stats(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD) global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes) my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan') my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa') my_share_count = my_unstale_count + my_orphan_count + my_doa_count my_stale_count = my_orphan_count + my_doa_count my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None my_work = 
sum(bitcoin_data.target_to_average_attempts(share.target) for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1) if share.hash in wb.my_share_hashes) actual_time = (node.tracker.items[node.best_share_var.value].timestamp - node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp) share_att_s = my_work / actual_time miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_last_difficulties = {} for addr in wb.last_work_shares.value: miner_last_difficulties[addr] = bitcoin_data.target_to_difficulty(wb.last_work_shares.value[addr].target) return dict( my_hash_rates_in_last_hour=dict( note="DEPRECATED", nonstale=share_att_s, rewarded=share_att_s/(1 - global_stale_prop), actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway ), my_share_counts_in_last_hour=dict( shares=my_share_count, unstale_shares=my_unstale_count, stale_shares=my_stale_count, orphan_stale_shares=my_orphan_count, doa_stale_shares=my_doa_count, ), my_stale_proportions_in_last_hour=dict( stale=my_stale_prop, orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None, dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None, ), miner_hash_rates=miner_hash_rates, miner_dead_hash_rates=miner_dead_hash_rates, miner_last_difficulties=miner_last_difficulties, efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), shares=dict( total=shares, orphan=stale_orphan_shares, dead=stale_doa_shares, ), uptime=time.time() - start_time, attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy']*1e-8, warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value), donation_proportion=wb.donation_percentage/100, version=p2pool.__version__, protocol_version=p2p.Protocol.VERSION, fee=wb.worker_fee, ) class WebInterface(deferred_resource.DeferredResource): def __init__(self, func, mime_type='application/json', args=()): deferred_resource.DeferredResource.__init__(self) self.func, self.mime_type, self.args = func, mime_type, args def getChild(self, child, request): return WebInterface(self.func, self.mime_type, self.args + (child,)) @defer.inlineCallbacks def render_GET(self, request): request.setHeader('Content-Type', self.mime_type) request.setHeader('Access-Control-Allow-Origin', '*') res = yield self.func(*self.args) defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res) def decent_height(): return min(node.tracker.get_height(node.best_share_var.value), 720) web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, 
node.best_share_var.value, decent_height())))) web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target))) web_root.putChild('users', WebInterface(get_users)) web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems()))) web_root.putChild('fee', WebInterface(lambda: wb.worker_fee)) web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e8) for script, value in node.get_current_txouts().iteritems()))) web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain')) web_root.putChild('global_stats', WebInterface(get_global_stats)) web_root.putChild('local_stats', WebInterface(get_local_stats)) web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join('%s%s' % (peer.transport.getPeer().host, ':'+str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else '') for peer in node.p2p_node.peers.itervalues()))) web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue( dict([(a, (yield b)) for a, b in [( '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), defer.inlineCallbacks(lambda peer=peer: defer.returnValue( min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)]) ))() ) for peer in list(node.p2p_node.peers.itervalues())] ]) )))) web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues()))) web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT))) def height_from_coinbase(coinbase): opcode = ord(coinbase[0]) if len(coinbase) > 0 else 0 if opcode >= 1 and opcode <= 75: return pack.IntType(opcode*8).unpack(coinbase[1:opcode+1]) if opcode == 76: return pack.IntType(8).unpack(coinbase[1:2]) if opcode == 77: return pack.IntType(8).unpack(coinbase[1:3]) if opcode == 78: return pack.IntType(8).unpack(coinbase[1:5]) if opcode >= 79 and opcode <= 96: return opcode - 80 return None web_root.putChild('recent_blocks', WebInterface(lambda: [dict( ts=s.timestamp, hash='%064x' % s.header_hash, number=height_from_coinbase(s.share_data['coinbase']), share='%064x' % s.hash, ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target])) web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time)) web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True))) new_root = resource.Resource() web_root.putChild('web', new_root) stat_log = [] if os.path.exists(os.path.join(datadir_path, 'stats')): try: with open(os.path.join(datadir_path, 'stats'), 'rb') as f: stat_log = json.loads(f.read()) except: log.err(None, 'Error loading stats:') def update_stat_log(): while stat_log 
and stat_log[0]['time'] < time.time() - 24*60*60: stat_log.pop(0) lookbehind = 3600//node.net.SHARE_PERIOD if node.tracker.get_height(node.best_share_var.value) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() stat_log.append(dict( time=time.time(), pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop), pool_stale_prop=global_stale_prop, local_hash_rates=miner_hash_rates, local_dead_hash_rates=miner_dead_hash_rates, shares=shares, stale_shares=stale_orphan_shares + stale_doa_shares, stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares), current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target), block_value=node.bitcoind_work.value['subsidy']*1e-8, )) with open(os.path.join(datadir_path, 'stats'), 'wb') as f: f.write(json.dumps(stat_log)) x = deferral.RobustLoopingCall(update_stat_log) x.start(5*60) stop_event.watch(x.stop) new_root.putChild('log', WebInterface(lambda: stat_log)) def get_share(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return None share = node.tracker.items[int(share_hash_str, 16)] return dict( parent='%064x' % share.previous_hash, children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children type_name=type(share).__name__, local=dict( verified=share.hash in node.tracker.verified.items, time_first_seen=start_time if share.time_seen == 0 else share.time_seen, peer_first_received_from=share.peer_addr, ), share_data=dict( timestamp=share.timestamp, target=share.target, max_target=share.max_target, payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT), donation=share.share_data['donation']/65535, stale_info=share.share_data['stale_info'], nonce=share.share_data['nonce'], desired_version=share.share_data['desired_version'], absheight=share.absheight, abswork=share.abswork, ), block=dict( hash='%064x' % share.header_hash, header=dict( version=share.header['version'], previous_block='%064x' % share.header['previous_block'], merkle_root='%064x' % share.header['merkle_root'], timestamp=share.header['timestamp'], target=share.header['bits'].target, nonce=share.header['nonce'], ), gentx=dict( hash='%064x' % share.gentx_hash, coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'), value=share.share_data['subsidy']*1e-8, last_txout_nonce='%016x' % share.contents['last_txout_nonce'], ), other_transaction_hashes=['%064x' % x for x in share.get_other_tx_hashes(node.tracker)], ), ) new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str))) new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads])) new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in 
node.tracker.verified.heads])) new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())])) new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())])) new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value)) new_root.putChild('my_share_hashes', WebInterface(lambda: ['%064x' % my_share_hash for my_share_hash in wb.my_share_hashes])) def get_share_data(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return '' share = node.tracker.items[int(share_hash_str, 16)] return p2pool_data.share_type.pack(share.as_share1a()) new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream')) new_root.putChild('currency_info', WebInterface(lambda: dict( symbol=node.net.PARENT.SYMBOL, block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX, tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX, ))) new_root.putChild('version', WebInterface(lambda: p2pool.__version__)) hd_path = os.path.join(datadir_path, 'graph_db') hd_data = _atomic_read(hd_path) hd_obj = {} if hd_data is not None: try: hd_obj = json.loads(hd_data) except Exception: log.err(None, 'Error reading graph database:') dataview_descriptions = { 'last_hour': graph.DataViewDescription(150, 60*60), 'last_day': graph.DataViewDescription(300, 60*60*24), 'last_week': graph.DataViewDescription(300, 60*60*24*7), 'last_month': graph.DataViewDescription(300, 60*60*24*30), 'last_year': graph.DataViewDescription(300, 60*60*24*365.25), } hd = graph.HistoryDatabase.from_obj({ 'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False), 'local_share_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True, multivalue_undefined_means_0=True, default_func=graph.make_multivalue_migrator(dict(good='local_share_hash_rate', dead='local_dead_share_hash_rate', orphan='local_orphan_share_hash_rate'), post_func=lambda bins: [dict((k, (v[0] - (sum(bin.get(rem_k, (0, 0))[0] for rem_k in ['dead', 'orphan']) if k == 'good' else 0), v[1])) for k, v in bin.iteritems()) for bin in bins])), 'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'current_payout': graph.DataStreamDescription(dataview_descriptions), 'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True), 'peers': graph.DataStreamDescription(dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming='incoming_peers', outgoing='outgoing_peers'))), 'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True), 'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), 'getwork_latency': graph.DataStreamDescription(dataview_descriptions), 'memory_usage': graph.DataStreamDescription(dataview_descriptions), }, hd_obj) x 
= deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))) x.start(100) stop_event.watch(x.stop) @wb.pseudoshare_received.watch def _(work, dead, user): t = time.time() hd.datastreams['local_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_hash_rate'].add_datum(t, work) if user is not None: hd.datastreams['miner_hash_rates'].add_datum(t, {user: work}) if dead: hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work}) @wb.share_received.watch def _(work, dead, share_hash): t = time.time() if not dead: hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=work)) else: hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=work)) def later(): res = node.tracker.is_child_of(share_hash, node.best_share_var.value) if res is None: res = False # share isn't connected to sharechain? assume orphaned if res and dead: # share was DOA, but is now in sharechain # move from dead to good hd.datastreams['local_share_hash_rates'].add_datum(t, dict(dead=-work, good=work)) elif not res and not dead: # share wasn't DOA, and isn't in sharechain # move from good to orphan hd.datastreams['local_share_hash_rates'].add_datum(t, dict(good=-work, orphan=work)) reactor.callLater(200, later) @node.p2p_node.traffic_happened.watch def _(name, bytes): hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes}) def add_point(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value)) t = time.time() pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True) pool_total = sum(pool_rates.itervalues()) hd.datastreams['pool_rates'].add_datum(t, pool_rates) current_txouts = node.get_current_txouts() hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0)*1e-8) miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems()) hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-8) for user in miner_hash_rates if user in current_txouts_by_address)) hd.datastreams['peers'].add_datum(t, dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), )) vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind) vs_total = sum(vs.itervalues()) hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems())) try: hd.datastreams['memory_usage'].add_datum(t, memory.resident()) except: if p2pool.DEBUG: traceback.print_exc() x = deferral.RobustLoopingCall(add_point) x.start(5) stop_event.watch(x.stop) @node.bitcoind_work.changed.watch def _(new_work): hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency']) new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time()))) web_root.putChild('static', static.File(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), 'web-static'))) return web_root
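# A short usage sketch of the JSON endpoints registered above, assuming a node
# is already serving its worker/web port locally (the host/port below are
# placeholders; adjust them to your own worker port). Only the endpoint paths
# come from the putChild() calls in get_web_root(); the rest is illustrative.
import json
import urllib2

BASE_URL = 'http://127.0.0.1:9332'   # hypothetical worker endpoint

def fetch(path):
    return json.loads(urllib2.urlopen(BASE_URL + path).read())

local_stats = fetch('/local_stats')
if local_stats is not None:
    print 'uptime: %.0fs  efficiency: %r' % (local_stats['uptime'], local_stats['efficiency'])
# one of the graph datastreams defined above, binned over the last hour
print fetch('/web/graph_data/getwork_latency/last_hour')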
def get_work(self, pubkey_hash, desired_share_target, desired_pseudoshare_target): global print_throttle if (self.node.p2p_node is None or len(self.node.p2p_node.peers) == 0) and self.node.net.PERSIST: raise jsonrpc.Error_for_code(-12345)(u'p2pool is not connected to any peers') if self.node.best_share_var.value is None and self.node.net.PERSIST: raise jsonrpc.Error_for_code(-12345)(u'p2pool is downloading shares') if set(r[1:] if r.startswith('!') else r for r in self.node.bitcoind_work.value['rules']) - set(getattr(self.node.net, 'SOFTFORKS_REQUIRED', [])): raise jsonrpc.Error_for_code(-12345)(u'unknown rule activated') if self.merged_work.value: tree, size = bitcoin_data.make_auxpow_tree(self.merged_work.value) mm_hashes = [self.merged_work.value.get(tree.get(i), dict(hash=0))['hash'] for i in xrange(size)] mm_data = '\xfa\xbemm' + bitcoin_data.aux_pow_coinbase_type.pack(dict( merkle_root=bitcoin_data.merkle_hash(mm_hashes), size=size, nonce=0, )) mm_later = [(aux_work, mm_hashes.index(aux_work['hash']), mm_hashes) for chain_id, aux_work in self.merged_work.value.iteritems()] else: mm_data = '' mm_later = [] tx_hashes = [bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx)) for tx in self.current_work.value['transactions']] tx_map = dict(zip(tx_hashes, self.current_work.value['transactions'])) previous_share = self.node.tracker.items[self.node.best_share_var.value] if self.node.best_share_var.value is not None else None if previous_share is None: share_type = p2pool_data.Share else: previous_share_type = type(previous_share) if previous_share_type.SUCCESSOR is None or self.node.tracker.get_height(previous_share.hash) < self.node.net.CHAIN_LENGTH: share_type = previous_share_type else: successor_type = previous_share_type.SUCCESSOR counts = p2pool_data.get_desired_version_counts(self.node.tracker, self.node.tracker.get_nth_parent_hash(previous_share.hash, self.node.net.CHAIN_LENGTH*9//10), self.node.net.CHAIN_LENGTH//10) upgraded = counts.get(successor_type.VERSION, 0)/sum(counts.itervalues()) if upgraded > .65: print 'Switchover imminent. 
Upgraded: %.3f%% Threshold: %.3f%%' % (upgraded*100, 95) # Share -> NewShare only valid if 95% of hashes in [net.CHAIN_LENGTH*9//10, net.CHAIN_LENGTH] for new version if counts.get(successor_type.VERSION, 0) > sum(counts.itervalues())*95//100: share_type = successor_type else: share_type = previous_share_type if desired_share_target is None: desired_share_target = 2**256-1 local_hash_rate = self._estimate_local_hash_rate() if local_hash_rate is not None: desired_share_target = min(desired_share_target, bitcoin_data.average_attempts_to_target(local_hash_rate * self.node.net.SHARE_PERIOD / 0.0167)) # limit to 1.67% of pool shares by modulating share difficulty local_addr_rates = self.get_local_addr_rates() lookbehind = 3600//self.node.net.SHARE_PERIOD block_subsidy = self.node.bitcoind_work.value['subsidy'] if previous_share is not None and self.node.tracker.get_height(previous_share.hash) > lookbehind: expected_payout_per_block = local_addr_rates.get(pubkey_hash, 0)/p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, lookbehind) \ * block_subsidy*(1-self.donation_percentage/100) # XXX doesn't use global stale rate to compute pool hash if expected_payout_per_block < self.node.net.PARENT.DUST_THRESHOLD: desired_share_target = min(desired_share_target, bitcoin_data.average_attempts_to_target((bitcoin_data.target_to_average_attempts(self.node.bitcoind_work.value['bits'].target)*self.node.net.SPREAD)*self.node.net.PARENT.DUST_THRESHOLD/block_subsidy) ) if True: share_info, gentx, other_transaction_hashes, get_share = share_type.generate_transaction( tracker=self.node.tracker, share_data=dict( previous_share_hash=self.node.best_share_var.value, coinbase=(script.create_push_script([ self.current_work.value['height'], ] + ([mm_data] if mm_data else []) + [ ]) + self.current_work.value['coinbaseflags'])[:100], nonce=random.randrange(2**32), pubkey_hash=pubkey_hash, subsidy=self.current_work.value['subsidy'], # add by caisirius for freecash devreward_value=self.current_work.value['devreward_value'], devreward_scriptpubkey=self.current_work.value['devreward_scriptpubkey'], donation=math.perfect_round(65535*self.donation_percentage/100), stale_info=(lambda (orphans, doas), total, (orphans_recorded_in_chain, doas_recorded_in_chain): 'orphan' if orphans > orphans_recorded_in_chain else 'doa' if doas > doas_recorded_in_chain else None )(*self.get_stale_counts()), desired_version=(share_type.SUCCESSOR if share_type.SUCCESSOR is not None else share_type).VOTING_VERSION, ), block_target=self.current_work.value['bits'].target, desired_timestamp=int(time.time() + 0.5), desired_target=desired_share_target, ref_merkle_link=dict(branch=[], index=0), desired_other_transaction_hashes_and_fees=zip(tx_hashes, self.current_work.value['transaction_fees']), net=self.node.net, known_txs=tx_map, base_subsidy=self.node.net.PARENT.SUBSIDY_FUNC(self.current_work.value['height']), ) packed_gentx = bitcoin_data.tx_id_type.pack(gentx) # stratum miners work with stripped transactions other_transactions = [tx_map[tx_hash] for tx_hash in other_transaction_hashes] mm_later = [(dict(aux_work, target=aux_work['target'] if aux_work['target'] != 'p2pool' else share_info['bits'].target), index, hashes) for aux_work, index, hashes in mm_later] if desired_pseudoshare_target is None: target = 2**256-1 local_hash_rate = self._estimate_local_hash_rate() if local_hash_rate is not None: target = min(target, bitcoin_data.average_attempts_to_target(local_hash_rate * 1)) # limit to 1 share response every 
second by modulating pseudoshare difficulty else: target = desired_pseudoshare_target target = max(target, share_info['bits'].target) for aux_work, index, hashes in mm_later: target = max(target, aux_work['target']) target = math.clip(target, self.node.net.PARENT.SANE_TARGET_RANGE) getwork_time = time.time() lp_count = self.new_work_event.times merkle_link = bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0) if share_info.get('segwit_data', None) is None else share_info['segwit_data']['txid_merkle_link'] if print_throttle is 0.0: print_throttle = time.time() else: current_time = time.time() if (current_time - print_throttle) > 5.0: print 'New work for worker! Difficulty: %.06f nbits: %x Share difficulty: %.06f Total block value: %.6f %s including %i transactions' % ( bitcoin_data.target_to_difficulty(target), bitcoin_data.FloatingInteger.from_target_upper_bound(target).bits, bitcoin_data.target_to_difficulty(share_info['bits'].target), self.current_work.value['subsidy']*1e-8, self.node.net.PARENT.SYMBOL, len(self.current_work.value['transactions']), ) print_throttle = time.time() #need this for stats self.last_work_shares.value[bitcoin_data.pubkey_hash_to_address(pubkey_hash, self.node.net.PARENT)]=share_info['bits'] ba = dict( version=max(self.current_work.value['version'], 0x20000000), previous_block=self.current_work.value['previous_block'], merkle_link=merkle_link, coinb1=packed_gentx[:-self.COINBASE_NONCE_LENGTH-4], coinb2=packed_gentx[-4:], timestamp=self.current_work.value['time'], bits=self.current_work.value['bits'], share_target=target, ) received_header_hashes = set() def got_response(header, user, coinbase_nonce): assert len(coinbase_nonce) == self.COINBASE_NONCE_LENGTH new_packed_gentx = packed_gentx[:-self.COINBASE_NONCE_LENGTH-4] + coinbase_nonce + packed_gentx[-4:] if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else packed_gentx new_gentx = bitcoin_data.tx_type.unpack(new_packed_gentx) if coinbase_nonce != '\0'*self.COINBASE_NONCE_LENGTH else gentx if bitcoin_data.is_segwit_tx(gentx): # reintroduce witness data to the gentx produced by stratum miners new_gentx['marker'] = 0 new_gentx['flag'] = gentx['flag'] new_gentx['witness'] = gentx['witness'] header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)) pow_hash = self.node.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) try: if pow_hash <= header['bits'].target or p2pool.DEBUG: helper.submit_block(dict(header=header, txs=[new_gentx] + other_transactions), False, self.node.factory, self.node.bitcoind, self.node.bitcoind_work, self.node.net) if pow_hash <= header['bits'].target: print print 'GOT BLOCK FROM MINER! Passing to bitcoind! 
%s%064x' % (self.node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, header_hash) print except: log.err(None, 'Error while processing potential block:') user, _, _, _ = self.get_user_details(user) assert header['previous_block'] == ba['previous_block'] assert header['merkle_root'] == bitcoin_data.check_merkle_link(bitcoin_data.hash256(new_packed_gentx), merkle_link) assert header['bits'] == ba['bits'] on_time = self.new_work_event.times == lp_count for aux_work, index, hashes in mm_later: try: if pow_hash <= aux_work['target'] or p2pool.DEBUG: df = deferral.retry('Error submitting merged block: (will retry)', 10, 10)(aux_work['merged_proxy'].rpc_getauxblock)( pack.IntType(256, 'big').pack(aux_work['hash']).encode('hex'), bitcoin_data.aux_pow_type.pack(dict( merkle_tx=dict( tx=new_gentx, block_hash=header_hash, merkle_link=merkle_link, ), merkle_link=bitcoin_data.calculate_merkle_link(hashes, index), parent_block_header=header, )).encode('hex'), ) @df.addCallback def _(result, aux_work=aux_work): if result != (pow_hash <= aux_work['target']): print >>sys.stderr, 'Merged block submittal result: %s Expected: %s' % (result, pow_hash <= aux_work['target']) else: print 'Merged block submittal result: %s' % (result,) @df.addErrback def _(err): log.err(err, 'Error submitting merged block:') except: log.err(None, 'Error while processing merged mining POW:') if pow_hash <= share_info['bits'].target and header_hash not in received_header_hashes: last_txout_nonce = pack.IntType(8*self.COINBASE_NONCE_LENGTH).unpack(coinbase_nonce) share = get_share(header, last_txout_nonce) print 'GOT SHARE! %s %s prev %s age %.2fs%s' % ( user, p2pool_data.format_hash(share.hash), p2pool_data.format_hash(share.previous_hash), time.time() - getwork_time, ' DEAD ON ARRIVAL' if not on_time else '', ) self.my_share_hashes.add(share.hash) if not on_time: self.my_doa_share_hashes.add(share.hash) self.node.tracker.add(share) self.node.set_best_share() try: if (pow_hash <= header['bits'].target or p2pool.DEBUG) and self.node.p2p_node is not None: self.node.p2p_node.broadcast_share(share.hash) except: log.err(None, 'Error forwarding block solution:') self.share_received.happened(bitcoin_data.target_to_average_attempts(share.target), not on_time, share.hash) if pow_hash > target: print 'Worker %s submitted share with hash > target:' % (user,) print ' Hash: %56x' % (pow_hash,) print ' Target: %56x' % (target,) elif header_hash in received_header_hashes: print >>sys.stderr, 'Worker %s submitted share more than once!' % (user,) else: received_header_hashes.add(header_hash) self.pseudoshare_received.happened(bitcoin_data.target_to_average_attempts(target), not on_time, user) self.recent_shares_ts_work.append((time.time(), bitcoin_data.target_to_average_attempts(target))) while len(self.recent_shares_ts_work) > 50: self.recent_shares_ts_work.pop(0) self.local_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), dead=not on_time, user=user, share_target=share_info['bits'].target)) self.local_addr_rate_monitor.add_datum(dict(work=bitcoin_data.target_to_average_attempts(target), pubkey_hash=pubkey_hash)) return on_time return ba, got_response
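# A minimal sketch, for illustration only, of the target arithmetic used above
# when choosing the pseudoshare target (aiming for roughly one result per
# second per miner) and when printing difficulties. The real conversions live
# in bitcoin_data (average_attempts_to_target, target_to_average_attempts,
# target_to_difficulty); the constant and rounding here are approximations.
from __future__ import division

DIFF1_TARGET = 0xffff * 2**208   # approximate difficulty-1 target

def target_to_average_attempts(target):
    return 2**256 // (target + 1)

def average_attempts_to_target(attempts):
    return 2**256 // attempts - 1

def target_to_difficulty(target):
    return DIFF1_TARGET / (target + 1)

# hypothetical miner doing 2 MH/s: pick a target it beats about once a second
pseudo_target = average_attempts_to_target(int(2.0e6 * 1))
print 'pseudoshare difficulty: %.6f' % target_to_difficulty(pseudo_target)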
def toFundingAddress(script, testnet=True):
    # hash160 the hex-encoded output script and render it as an address
    hsh = bitcoin_data.hash160(script.decode('hex'))
    return bitcoin_data.pubkey_hash_to_address(hsh, Network(testnet))
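# Hypothetical usage of toFundingAddress() above, assuming this module's
# imports (bitcoin_data, Network) are available. The hex script below is a
# placeholder, not a real funding script; substitute the actual output script.
sample_script_hex = '76a914' + '00' * 20 + '88ac'   # placeholder script bytes in hex
print toFundingAddress(sample_script_hex, testnet=True)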
def get_web_root(wb, datadir_path, bitcoind_getinfo_var, stop_event=variable.Event()): node = wb.node start_time = time.time() web_root = resource.Resource() def get_users(): height, last = node.tracker.get_height_and_last(node.best_share_var.value) weights, total_weight, donation_weight = node.tracker.get_cumulative_weights( node.best_share_var.value, min(height, 720), 65535 * 2 ** 256 ) res = {} for script in sorted(weights, key=lambda s: weights[s]): res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script] / total_weight return res def get_current_scaled_txouts(scale, trunc=0): txouts = node.get_current_txouts() total = sum(txouts.itervalues()) results = dict((script, value * scale // total) for script, value in txouts.iteritems()) if trunc > 0: total_random = 0 random_set = set() for s in sorted(results, key=results.__getitem__): if results[s] >= trunc: break total_random += results[s] random_set.add(s) if total_random: winner = math.weighted_choice((script, results[script]) for script in random_set) for script in random_set: del results[script] results[winner] = total_random if sum(results.itervalues()) < int(scale): results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues()) return results def get_patron_sendmany(total=None, trunc="0.01"): if total is None: return "need total argument. go to patron_sendmany/<TOTAL>" total = int(float(total) * 1e8) trunc = int(float(trunc) * 1e8) return json.dumps( dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), value / 1e8) for script, value in get_current_scaled_txouts(total, trunc).iteritems() if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None ) ) def get_global_stats(): # averaged over last hour if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600 // node.net.SHARE_PERIOD) nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second( node.tracker, node.best_share_var.value, lookbehind ) stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) return dict( pool_nonstale_hash_rate=nonstale_hash_rate, pool_hash_rate=nonstale_hash_rate / (1 - stale_prop), pool_stale_prop=stale_prop, min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target), ) def get_local_stats(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600 // node.net.SHARE_PERIOD) global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) my_unstale_count = sum( 1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes ) my_orphan_count = sum( 1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data["stale_info"] == "orphan" ) my_doa_count = sum( 1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data["stale_info"] == "doa" ) my_share_count = my_unstale_count + my_orphan_count + my_doa_count my_stale_count = my_orphan_count + my_doa_count my_stale_prop = my_stale_count / my_share_count if my_share_count != 0 else None my_work = sum( bitcoin_data.target_to_average_attempts(share.target) for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1) if 
share.hash in wb.my_share_hashes ) actual_time = ( node.tracker.items[node.best_share_var.value].timestamp - node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp ) share_att_s = my_work / actual_time miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() return dict( my_hash_rates_in_last_hour=dict( note="DEPRECATED", nonstale=share_att_s, rewarded=share_att_s / (1 - global_stale_prop), actual=share_att_s / (1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway ), my_share_counts_in_last_hour=dict( shares=my_share_count, unstale_shares=my_unstale_count, stale_shares=my_stale_count, orphan_stale_shares=my_orphan_count, doa_stale_shares=my_doa_count, ), my_stale_proportions_in_last_hour=dict( stale=my_stale_prop, orphan_stale=my_orphan_count / my_share_count if my_share_count != 0 else None, dead_stale=my_doa_count / my_share_count if my_share_count != 0 else None, ), miner_hash_rates=miner_hash_rates, miner_dead_hash_rates=miner_dead_hash_rates, efficiency_if_miner_perfect=(1 - stale_orphan_shares / shares) / (1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection efficiency=(1 - (stale_orphan_shares + stale_doa_shares) / shares) / (1 - global_stale_prop) if shares else None, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), shares=dict(total=shares, orphan=stale_orphan_shares, dead=stale_doa_shares), uptime=time.time() - start_time, attempts_to_share=bitcoin_data.target_to_average_attempts( node.tracker.items[node.best_share_var.value].max_target ), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value["bits"].target), block_value=node.bitcoind_work.value["subsidy"] * 1e-8, warnings=p2pool_data.get_warnings( node.tracker, node.best_share_var.value, node.net, bitcoind_getinfo_var.value, node.bitcoind_work.value ), donation_proportion=wb.donation_percentage / 100, version=p2pool.__version__, protocol_version=p2p.Protocol.VERSION, fee=wb.worker_fee, ) class WebInterface(deferred_resource.DeferredResource): def __init__(self, func, mime_type="application/json", args=()): deferred_resource.DeferredResource.__init__(self) self.func, self.mime_type, self.args = func, mime_type, args def getChild(self, child, request): return WebInterface(self.func, self.mime_type, self.args + (child,)) @defer.inlineCallbacks def render_GET(self, request): request.setHeader("Content-Type", self.mime_type) request.setHeader("Access-Control-Allow-Origin", "*") res = yield self.func(*self.args) defer.returnValue(json.dumps(res) if self.mime_type == "application/json" else res) def decent_height(): return min(node.tracker.get_height(node.best_share_var.value), 720) web_root.putChild( "rate", WebInterface( lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height()) / (1 - p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height())) ), ) web_root.putChild( "difficulty", WebInterface( lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target) ), ) web_root.putChild("users", WebInterface(get_users)) web_root.putChild( "user_stales", WebInterface( lambda: dict( 
(bitcoin_data.pubkey_hash_to_address(ph, node.net.PARENT), prop) for ph, prop in p2pool_data.get_user_stale_props( node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value) ).iteritems() ) ), ) web_root.putChild("fee", WebInterface(lambda: wb.worker_fee)) web_root.putChild( "current_payouts", WebInterface( lambda: dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), value / 1e8) for script, value in node.get_current_txouts().iteritems() ) ), ) web_root.putChild("patron_sendmany", WebInterface(get_patron_sendmany, "text/plain")) web_root.putChild("global_stats", WebInterface(get_global_stats)) web_root.putChild("local_stats", WebInterface(get_local_stats)) web_root.putChild( "peer_addresses", WebInterface( lambda: " ".join( "%s%s" % ( peer.transport.getPeer().host, ":" + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != node.net.P2P_PORT else "", ) for peer in node.p2p_node.peers.itervalues() ) ), ) web_root.putChild( "peer_txpool_sizes", WebInterface( lambda: dict( ("%s:%i" % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues() ) ), ) web_root.putChild( "pings", WebInterface( defer.inlineCallbacks( lambda: defer.returnValue( dict( [ (a, (yield b)) for a, b in [ ( "%s:%i" % (peer.transport.getPeer().host, peer.transport.getPeer().port), defer.inlineCallbacks( lambda peer=peer: defer.returnValue( min( [ ( yield peer.do_ping() .addCallback(lambda x: x / 0.001) .addErrback(lambda fail: None) ) for i in xrange(3) ] ) ) )(), ) for peer in list(node.p2p_node.peers.itervalues()) ] ] ) ) ) ), ) web_root.putChild( "peer_versions", WebInterface( lambda: dict(("%s:%i" % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues()) ), ) web_root.putChild( "payout_addr", WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(wb.my_pubkey_hash, node.net.PARENT)) ) web_root.putChild( "recent_blocks", WebInterface( lambda: [ dict( ts=s.timestamp, hash="%064x" % s.header_hash, number=p2pool_data.parse_bip0034(s.share_data["coinbase"])[0], share="%064x" % s.hash, ) for s in node.tracker.get_chain( node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24 * 60 * 60 // node.net.SHARE_PERIOD), ) if s.pow_hash <= s.header["bits"].target ] ), ) web_root.putChild("uptime", WebInterface(lambda: time.time() - start_time)) web_root.putChild( "stale_rates", WebInterface( lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True) ), ) new_root = resource.Resource() web_root.putChild("web", new_root) stat_log = [] if os.path.exists(os.path.join(datadir_path, "stats")): try: with open(os.path.join(datadir_path, "stats"), "rb") as f: stat_log = json.loads(f.read()) except: log.err(None, "Error loading stats:") def update_stat_log(): while stat_log and stat_log[0]["time"] < time.time() - 24 * 60 * 60: stat_log.pop(0) lookbehind = 3600 // node.net.SHARE_PERIOD if node.tracker.get_height(node.best_share_var.value) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind) (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() stat_log.append( dict( time=time.time(), pool_hash_rate=p2pool_data.get_pool_attempts_per_second( node.tracker, node.best_share_var.value, lookbehind ) / (1 - global_stale_prop), 
pool_stale_prop=global_stale_prop, local_hash_rates=miner_hash_rates, local_dead_hash_rates=miner_dead_hash_rates, shares=shares, stale_shares=stale_orphan_shares + stale_doa_shares, stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares), current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0) * 1e-8, peers=dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), attempts_to_share=bitcoin_data.target_to_average_attempts( node.tracker.items[node.best_share_var.value].max_target ), attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value["bits"].target), block_value=node.bitcoind_work.value["subsidy"] * 1e-8, ) ) with open(os.path.join(datadir_path, "stats"), "wb") as f: f.write(json.dumps(stat_log)) x = deferral.RobustLoopingCall(update_stat_log) x.start(5 * 60) stop_event.watch(x.stop) new_root.putChild("log", WebInterface(lambda: stat_log)) def get_share(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return None share = node.tracker.items[int(share_hash_str, 16)] return dict( parent="%064x" % share.previous_hash, children=[ "%064x" % x for x in sorted( node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())), ) ], # sorted from most children to least children type_name=type(share).__name__, local=dict( verified=share.hash in node.tracker.verified.items, time_first_seen=start_time if share.time_seen == 0 else share.time_seen, peer_first_received_from=share.peer_addr, ), share_data=dict( timestamp=share.timestamp, target=share.target, max_target=share.max_target, payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT), donation=share.share_data["donation"] / 65535, stale_info=share.share_data["stale_info"], nonce=share.share_data["nonce"], desired_version=share.share_data["desired_version"], absheight=share.absheight, abswork=share.abswork, ), block=dict( hash="%064x" % share.header_hash, header=dict( version=share.header["version"], previous_block="%064x" % share.header["previous_block"], merkle_root="%064x" % share.header["merkle_root"], timestamp=share.header["timestamp"], target=share.header["bits"].target, nonce=share.header["nonce"], ), gentx=dict( hash="%064x" % share.gentx_hash, coinbase=share.share_data["coinbase"].ljust(2, "\x00").encode("hex"), value=share.share_data["subsidy"] * 1e-8, last_txout_nonce="%016x" % share.contents["last_txout_nonce"], ), other_transaction_hashes=["%064x" % x for x in share.get_other_tx_hashes(node.tracker)], ), ) new_root.putChild("share", WebInterface(lambda share_hash_str: get_share(share_hash_str))) new_root.putChild("heads", WebInterface(lambda: ["%064x" % x for x in node.tracker.heads])) new_root.putChild("verified_heads", WebInterface(lambda: ["%064x" % x for x in node.tracker.verified.heads])) new_root.putChild( "tails", WebInterface(lambda: ["%064x" % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]), ) new_root.putChild( "verified_tails", WebInterface( lambda: [ "%064x" % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set()) ] ), ) new_root.putChild("best_share_hash", WebInterface(lambda: "%064x" % node.best_share_var.value)) new_root.putChild( "my_share_hashes", WebInterface(lambda: ["%064x" % my_share_hash for my_share_hash in wb.my_share_hashes]) ) def 
get_share_data(share_hash_str): if int(share_hash_str, 16) not in node.tracker.items: return "" share = node.tracker.items[int(share_hash_str, 16)] return p2pool_data.share_type.pack(share.as_share()) new_root.putChild( "share_data", WebInterface(lambda share_hash_str: get_share_data(share_hash_str), "application/octet-stream") ) new_root.putChild( "currency_info", WebInterface( lambda: dict( symbol=node.net.PARENT.SYMBOL, block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX, tx_explorer_url_prefix=node.net.PARENT.TX_EXPLORER_URL_PREFIX, ) ), ) new_root.putChild("version", WebInterface(lambda: p2pool.__version__)) hd_path = os.path.join(datadir_path, "graph_db") hd_data = _atomic_read(hd_path) hd_obj = {} if hd_data is not None: try: hd_obj = json.loads(hd_data) except Exception: log.err(None, "Error reading graph database:") dataview_descriptions = { "last_hour": graph.DataViewDescription(150, 60 * 60), "last_day": graph.DataViewDescription(300, 60 * 60 * 24), "last_week": graph.DataViewDescription(300, 60 * 60 * 24 * 7), "last_month": graph.DataViewDescription(300, 60 * 60 * 24 * 30), "last_year": graph.DataViewDescription(300, 60 * 60 * 24 * 365.25), } hd = graph.HistoryDatabase.from_obj( { "local_hash_rate": graph.DataStreamDescription(dataview_descriptions, is_gauge=False), "local_dead_hash_rate": graph.DataStreamDescription(dataview_descriptions, is_gauge=False), "local_share_hash_rates": graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True, multivalue_undefined_means_0=True, default_func=graph.make_multivalue_migrator( dict( good="local_share_hash_rate", dead="local_dead_share_hash_rate", orphan="local_orphan_share_hash_rate", ), post_func=lambda bins: [ dict( ( k, ( v[0] - ( sum(bin.get(rem_k, (0, 0))[0] for rem_k in ["dead", "orphan"]) if k == "good" else 0 ), v[1], ), ) for k, v in bin.iteritems() ) for bin in bins ], ), ), "pool_rates": graph.DataStreamDescription( dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True ), "current_payout": graph.DataStreamDescription(dataview_descriptions), "current_payouts": graph.DataStreamDescription(dataview_descriptions, multivalues=True), "peers": graph.DataStreamDescription( dataview_descriptions, multivalues=True, default_func=graph.make_multivalue_migrator(dict(incoming="incoming_peers", outgoing="outgoing_peers")), ), "miner_hash_rates": graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), "miner_dead_hash_rates": graph.DataStreamDescription( dataview_descriptions, is_gauge=False, multivalues=True ), "desired_version_rates": graph.DataStreamDescription( dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True ), "traffic_rate": graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True), "getwork_latency": graph.DataStreamDescription(dataview_descriptions), "memory_usage": graph.DataStreamDescription(dataview_descriptions), }, hd_obj, ) x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))) x.start(100) stop_event.watch(x.stop) @wb.pseudoshare_received.watch def _(work, dead, user): t = time.time() hd.datastreams["local_hash_rate"].add_datum(t, work) if dead: hd.datastreams["local_dead_hash_rate"].add_datum(t, work) if user is not None: hd.datastreams["miner_hash_rates"].add_datum(t, {user: work}) if dead: hd.datastreams["miner_dead_hash_rates"].add_datum(t, {user: work}) 
@wb.share_received.watch def _(work, dead, share_hash): t = time.time() if not dead: hd.datastreams["local_share_hash_rates"].add_datum(t, dict(good=work)) else: hd.datastreams["local_share_hash_rates"].add_datum(t, dict(dead=work)) def later(): res = node.tracker.is_child_of(share_hash, node.best_share_var.value) if res is None: res = False # share isn't connected to sharechain? assume orphaned if res and dead: # share was DOA, but is now in sharechain # move from dead to good hd.datastreams["local_share_hash_rates"].add_datum(t, dict(dead=-work, good=work)) elif not res and not dead: # share wasn't DOA, and isn't in sharechain # move from good to orphan hd.datastreams["local_share_hash_rates"].add_datum(t, dict(good=-work, orphan=work)) reactor.callLater(200, later) @node.p2p_node.traffic_happened.watch def _(name, bytes): hd.datastreams["traffic_rate"].add_datum(time.time(), {name: bytes}) def add_point(): if node.tracker.get_height(node.best_share_var.value) < 10: return None lookbehind = min( node.net.CHAIN_LENGTH, 60 * 60 // node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value) ) t = time.time() pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True) pool_total = sum(pool_rates.itervalues()) hd.datastreams["pool_rates"].add_datum(t, pool_rates) current_txouts = node.get_current_txouts() hd.datastreams["current_payout"].add_datum( t, current_txouts.get(bitcoin_data.pubkey_hash_to_script2(wb.my_pubkey_hash), 0) * 1e-8 ) miner_hash_rates, miner_dead_hash_rates = wb.get_local_rates() current_txouts_by_address = dict( (bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems() ) hd.datastreams["current_payouts"].add_datum( t, dict( (user, current_txouts_by_address[user] * 1e-8) for user in miner_hash_rates if user in current_txouts_by_address ), ) hd.datastreams["peers"].add_datum( t, dict( incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming), ), ) vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind) vs_total = sum(vs.itervalues()) hd.datastreams["desired_version_rates"].add_datum( t, dict((str(k), v / vs_total * pool_total) for k, v in vs.iteritems()) ) try: hd.datastreams["memory_usage"].add_datum(t, memory.resident()) except: if p2pool.DEBUG: traceback.print_exc() x = deferral.RobustLoopingCall(add_point) x.start(5) stop_event.watch(x.stop) @node.bitcoind_work.changed.watch def _(new_work): hd.datastreams["getwork_latency"].add_datum(time.time(), new_work["latency"]) new_root.putChild( "graph_data", WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())) ) web_root.putChild("static", static.File(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "web-static"))) return web_root
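# A hedged sketch of how the resource tree returned above is typically served: main()
# further down attaches the worker interface and listens with twisted.web in exactly this
# way, so wb, datadir_path, bitcoind_getinfo_var and worker_endpoint stand in for the
# objects constructed there rather than being new API.
#
#     web_root = get_web_root(wb, datadir_path, bitcoind_getinfo_var)
#     reactor.listenTCP(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])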
def get_web_root(tracker, current_work, current_work2, get_current_txouts, datadir_path, net, get_stale_counts, my_pubkey_hash, local_rate_monitor, worker_fee, p2p_node, my_share_hashes, recent_blocks, pseudoshare_received, share_received): start_time = time.time() web_root = resource.Resource() def get_users(): height, last = tracker.get_height_and_last(current_work.value['best_share_hash']) weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256) res = {} for script in sorted(weights, key=lambda s: weights[s]): res[bitcoin_data.script2_to_human(script, net.PARENT)] = weights[script]/total_weight return json.dumps(res) def get_current_scaled_txouts(scale, trunc=0): txouts = get_current_txouts() total = sum(txouts.itervalues()) results = dict((script, value*scale//total) for script, value in txouts.iteritems()) if trunc > 0: total_random = 0 random_set = set() for s in sorted(results, key=results.__getitem__): if results[s] >= trunc: break total_random += results[s] random_set.add(s) if total_random: winner = math.weighted_choice((script, results[script]) for script in random_set) for script in random_set: del results[script] results[winner] = total_random if sum(results.itervalues()) < int(scale): results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues()) return results def get_patron_sendmany(total=None, trunc='0.01'): if total is None: return 'need total argument. go to patron_sendmany/<TOTAL>' total = int(float(total)*1e8) trunc = int(float(trunc)*1e8) return dict( (bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_scaled_txouts(total, trunc).iteritems() if bitcoin_data.script2_to_address(script, net.PARENT) is not None ) def get_global_stats(): # averaged over last hour lookbehind = 3600//net.SHARE_PERIOD if tracker.get_height(current_work.value['best_share_hash']) < lookbehind: return None nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind) stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind) return dict( pool_nonstale_hash_rate=nonstale_hash_rate, pool_hash_rate=nonstale_hash_rate/(1 - stale_prop), pool_stale_prop=stale_prop, ) def get_local_stats(): lookbehind = 3600//net.SHARE_PERIOD if tracker.get_height(current_work.value['best_share_hash']) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind) my_unstale_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes) my_orphan_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 253) my_doa_count = sum(1 for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind) if share.hash in my_share_hashes and share.share_data['stale_info'] == 254) my_share_count = my_unstale_count + my_orphan_count + my_doa_count my_stale_count = my_orphan_count + my_doa_count my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None my_work = sum(bitcoin_data.target_to_average_attempts(share.target) for share in tracker.get_chain(current_work.value['best_share_hash'], lookbehind - 1) if share.hash in my_share_hashes) actual_time = 
(tracker.shares[current_work.value['best_share_hash']].timestamp - tracker.shares[tracker.get_nth_parent_hash(current_work.value['best_share_hash'], lookbehind - 1)].timestamp) share_att_s = my_work / actual_time miner_hash_rates = {} miner_dead_hash_rates = {} datums, dt = local_rate_monitor.get_datums_in_last() for datum in datums: miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt if datum['dead']: miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts() return dict( my_hash_rates_in_last_hour=dict( note="DEPRECATED", nonstale=share_att_s, rewarded=share_att_s/(1 - global_stale_prop), actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway ), my_share_counts_in_last_hour=dict( shares=my_share_count, unstale_shares=my_unstale_count, stale_shares=my_stale_count, orphan_stale_shares=my_orphan_count, doa_stale_shares=my_doa_count, ), my_stale_proportions_in_last_hour=dict( stale=my_stale_prop, orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None, dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None, ), miner_hash_rates=miner_hash_rates, miner_dead_hash_rates=miner_dead_hash_rates, efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection ) class WebInterface(resource.Resource): def __init__(self, func, mime_type='application/json', args=()): resource.Resource.__init__(self) self.func, self.mime_type, self.args = func, mime_type, args def getChild(self, child, request): return WebInterface(self.func, self.mime_type, self.args + (child,)) def render_GET(self, request): request.setHeader('Content-Type', self.mime_type) request.setHeader('Access-Control-Allow-Origin', '*') res = self.func(*self.args) return json.dumps(res) if self.mime_type == 'application/json' else res web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720)/(1-p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)))) web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(tracker.shares[current_work.value['best_share_hash']].max_target))) web_root.putChild('users', WebInterface(get_users)) web_root.putChild('fee', WebInterface(lambda: worker_fee)) web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, net.PARENT), value/1e8) for script, value in get_current_txouts().iteritems()))) web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain')) web_root.putChild('global_stats', WebInterface(get_global_stats)) web_root.putChild('local_stats', WebInterface(get_local_stats)) web_root.putChild('peer_addresses', WebInterface(lambda: ' '.join(peer.transport.getPeer().host + (':' + str(peer.transport.getPeer().port) if peer.transport.getPeer().port != net.P2P_PORT else '') for peer in p2p_node.peers.itervalues()), 'text/plain')) web_root.putChild('peer_versions', WebInterface(lambda: ''.join('%s:%i ' % peer.addr + peer.other_sub_version + '\n' for peer in p2p_node.peers.itervalues()), 'text/plain')) web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT))) 
web_root.putChild('recent_blocks', WebInterface(lambda: recent_blocks)) web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time)) new_root = resource.Resource() web_root.putChild('web', new_root) stat_log = [] if os.path.exists(os.path.join(datadir_path, 'stats')): try: with open(os.path.join(datadir_path, 'stats'), 'rb') as f: stat_log = json.loads(f.read()) except: log.err(None, 'Error loading stats:') def update_stat_log(): while stat_log and stat_log[0]['time'] < time.time() - 24*60*60: stat_log.pop(0) lookbehind = 3600//net.SHARE_PERIOD if tracker.get_height(current_work.value['best_share_hash']) < lookbehind: return None global_stale_prop = p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], lookbehind) (stale_orphan_shares, stale_doa_shares), shares, _ = get_stale_counts() miner_hash_rates = {} miner_dead_hash_rates = {} datums, dt = local_rate_monitor.get_datums_in_last() for datum in datums: miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt if datum['dead']: miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt stat_log.append(dict( time=time.time(), pool_hash_rate=p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], lookbehind)/(1-global_stale_prop), pool_stale_prop=global_stale_prop, local_hash_rates=miner_hash_rates, local_dead_hash_rates=miner_dead_hash_rates, shares=shares, stale_shares=stale_orphan_shares + stale_doa_shares, stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares), current_payout=get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, peers=dict( incoming=sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming), outgoing=sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming), ), attempts_to_share=bitcoin_data.target_to_average_attempts(tracker.shares[current_work.value['best_share_hash']].max_target), attempts_to_block=bitcoin_data.target_to_average_attempts(current_work.value['bits'].target), block_value=current_work2.value['subsidy']*1e-8, )) with open(os.path.join(datadir_path, 'stats'), 'wb') as f: f.write(json.dumps(stat_log)) task.LoopingCall(update_stat_log).start(5*60) new_root.putChild('log', WebInterface(lambda: stat_log)) def get_share(share_hash_str): if int(share_hash_str, 16) not in tracker.shares: return None share = tracker.shares[int(share_hash_str, 16)] return dict( parent='%064x' % share.previous_hash, children=['%064x' % x for x in sorted(tracker.reverse_shares.get(share.hash, set()), key=lambda sh: -len(tracker.reverse_shares.get(sh, set())))], # sorted from most children to least children local=dict( verified=share.hash in tracker.verified.shares, time_first_seen=start_time if share.time_seen == 0 else share.time_seen, peer_first_received_from=share.peer.addr if share.peer is not None else None, ), share_data=dict( timestamp=share.timestamp, target=share.target, max_target=share.max_target, payout_address=bitcoin_data.script2_to_address(share.new_script, net.PARENT), donation=share.share_data['donation']/65535, stale_info=share.share_data['stale_info'], nonce=share.share_data['nonce'], ), block=dict( hash='%064x' % share.header_hash, header=dict( version=share.header['version'], previous_block='%064x' % share.header['previous_block'], merkle_root='%064x' % share.header['merkle_root'], timestamp=share.header['timestamp'], target=share.header['bits'].target, nonce=share.header['nonce'], ), 
gentx=dict( hash='%064x' % share.gentx_hash, coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'), value=share.share_data['subsidy']*1e-8, ), txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else 1 if len(share.merkle_link['branch']) == 0 else [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])], ), ) new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str))) new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in tracker.heads])) new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in tracker.verified.heads])) new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in tracker.tails for x in tracker.reverse_shares.get(t, set())])) new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in tracker.verified.tails for x in tracker.verified.reverse_shares.get(t, set())])) new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % current_work.value['best_share_hash'])) class Explorer(resource.Resource): def render_GET(self, request): return 'moved to /static/explorer.html' def getChild(self, child, request): return self new_root.putChild('explorer', Explorer()) hd_path = os.path.join(datadir_path, 'graph_db') hd_data = _atomic_read(hd_path) hd_obj = {} if hd_data is not None: try: hd_obj = json.loads(hd_data) except Exception: log.err(None, 'Error reading graph database:') dataview_descriptions = { 'last_hour': graph.DataViewDescription(150, 60*60), 'last_day': graph.DataViewDescription(300, 60*60*24), 'last_week': graph.DataViewDescription(300, 60*60*24*7), 'last_month': graph.DataViewDescription(300, 60*60*24*30), 'last_year': graph.DataViewDescription(300, 60*60*24*365.25), } def combine_and_keep_largest(*dicts): res = {} for d in dicts: for k, v in d.iteritems(): res[k] = res.get(k, 0) + v return dict((k, v) for k, v in sorted(res.iteritems(), key=lambda (k, v): v)[-30:] if v) hd = graph.HistoryDatabase.from_obj({ 'local_hash_rate': graph.DataStreamDescription(False, dataview_descriptions), 'local_dead_hash_rate': graph.DataStreamDescription(False, dataview_descriptions), 'local_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions), 'local_dead_share_hash_rate': graph.DataStreamDescription(False, dataview_descriptions), 'pool_rate': graph.DataStreamDescription(True, dataview_descriptions), 'pool_stale_rate': graph.DataStreamDescription(True, dataview_descriptions), 'current_payout': graph.DataStreamDescription(True, dataview_descriptions), 'incoming_peers': graph.DataStreamDescription(True, dataview_descriptions), 'outgoing_peers': graph.DataStreamDescription(True, dataview_descriptions), 'miner_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict), 'miner_dead_hash_rates': graph.DataStreamDescription(False, dataview_descriptions, {}, combine_and_keep_largest, math.mult_dict), }, hd_obj) task.LoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj()))).start(100) @pseudoshare_received.watch def _(work, dead, user, had_vip_pass): t = time.time() hd.datastreams['local_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_hash_rate'].add_datum(t, work) if user is not None: hd.datastreams['miner_hash_rates'].add_datum(t, {user: work}) if dead: hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work}) @share_received.watch def _(work, dead): t = time.time() 
hd.datastreams['local_share_hash_rate'].add_datum(t, work) if dead: hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work) def add_point(): if tracker.get_height(current_work.value['best_share_hash']) < 720: return nonstalerate = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], 720) poolrate = nonstalerate / (1 - p2pool_data.get_average_stale_prop(tracker, current_work.value['best_share_hash'], 720)) t = time.time() hd.datastreams['pool_rate'].add_datum(t, poolrate) hd.datastreams['pool_stale_rate'].add_datum(t, poolrate - nonstalerate) hd.datastreams['current_payout'].add_datum(t, get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8) hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming)) hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in p2p_node.peers.itervalues() if not peer.incoming)) task.LoopingCall(add_point).start(5) new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time()))) web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static'))) return web_root
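# A small client-side sketch against the JSON endpoints registered by either get_web_root
# variant above (assumptions: p2pool is running locally and its worker/web port is 9332;
# any reachable host:port works the same way).
#
#     import json, urllib2
#     stats = json.loads(urllib2.urlopen('http://127.0.0.1:9332/local_stats').read())
#     if stats is not None:
#         print stats['miner_hash_rates']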
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__,) print traffic_happened = variable.Event() @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) yield factory.getProtocol() # waits until handshake is successful print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username) bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) @deferral.retry('Error while checking Bitcoin connection:', 1) @defer.inlineCallbacks def check(): if not (yield net.PARENT.RPC_CHECK(bitcoind)): print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!" raise deferral.RetrySilentlyException() if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']): print >>sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!' raise deferral.RetrySilentlyException() yield check() temp_work = yield getwork(bitcoind) if not args.testnet: factory = yield connect_p2p() block_height_var = variable.Variable(None) @defer.inlineCallbacks def poll_height(): block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)())) yield poll_height() task.LoopingCall(poll_height).start(60*60) bitcoind_warning_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors'] bitcoind_warning_var.set(errors if errors != '' else None) yield poll_warnings() task.LoopingCall(poll_warnings).start(20*60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'],) print ' Current block height: %i' % (block_height_var.value,) print print 'Determining payout address...' if args.pubkey_hash is None: address_path = os.path.join(datadir_path, 'cached_payout_address') if os.path.exists(address_path): with open(address_path, 'rb') as f: address = f.read().strip('\r\n') print ' Loaded cached address: %s...' % (address,) else: address = None if address is not None: res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))() if not res['isvalid'] or not res['ismine']: print ' Cached address is either invalid or not controlled by local bitcoind!' address = None if address is None: print ' Getting payout address from bitcoind...' address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))() with open(address_path, 'wb') as f: f.write(address) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT) else: my_pubkey_hash = args.pubkey_hash print ' ...success! 
Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT) print my_share_hashes = set() my_doa_share_hashes = set() tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes) shared_share_hashes = set() ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net) known_verified = set() print "Loading shares..." for i, (mode, contents) in enumerate(ss.get_shares()): if mode == 'share': if contents.hash in tracker.items: continue shared_share_hashes.add(contents.hash) contents.time_seen = 0 tracker.add(contents) if len(tracker.items) % 1000 == 0 and tracker.items: print " %i" % (len(tracker.items),) elif mode == 'verified_hash': known_verified.add(contents) else: raise AssertionError() print " ...inserting %i verified shares..." % (len(known_verified),) for h in known_verified: if h not in tracker.items: ss.forget_verified_share(h) continue tracker.verified.add(tracker.items[h]) print " ...done loading %i shares!" % (len(tracker.items),) print tracker.removed.watch(lambda share: ss.forget_share(share.hash)) tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash)) tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash)) print 'Initializing work...' # BITCOIND WORK bitcoind_work = variable.Variable((yield getwork(bitcoind))) @defer.inlineCallbacks def work_poller(): while True: flag = factory.new_block.get_deferred() try: bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate']))) except: log.err() yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True) work_poller() # PEER WORK best_block_header = variable.Variable(None) def handle_header(new_header): # check that header matches current target if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target): return bitcoind_best_block = bitcoind_work.value['previous_block'] if (best_block_header.value is None or ( new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block ) # new is child of current and previous is current or ( bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and best_block_header.value['previous_block'] != bitcoind_best_block )): # new is current and previous is not a child of current best_block_header.set(new_header) @defer.inlineCallbacks def poll_header(): handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block']))) bitcoind_work.changed.watch(lambda _: poll_header()) yield deferral.retry('Error while requesting best block header:')(poll_header)() # BEST SHARE get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net) best_share_var = variable.Variable(None) desired_var = variable.Variable(None) def set_best_share(): best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits']) best_share_var.set(best) desired_var.set(desired) bitcoind_work.changed.watch(lambda _: set_best_share()) set_best_share() print ' ...success!' print # setup p2p logic and join p2pool network class Node(p2p.Node): def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' 
% (len(shares), '%s:%i' % peer.addr if peer is not None else None) new_count = 0 for share in shares: if share.hash in tracker.items: #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),) continue new_count += 1 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None) tracker.add(share) if new_count: set_best_share() if len(shares) > 5: print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH) @defer.inlineCallbacks def handle_share_hashes(self, hashes, peer): new_hashes = [x for x in hashes if x not in tracker.items] if not new_hashes: return try: shares = yield peer.get_shares( hashes=new_hashes, parents=0, stops=[], ) except: log.err(None, 'in handle_share_hashes:') else: self.handle_shares(shares, peer) def handle_get_shares(self, hashes, parents, stops, peer): parents = min(parents, 1000//len(hashes)) stops = set(stops) shares = [] for share_hash in hashes: for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))): if share.hash in stops: break shares.append(share) print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1]) return shares def handle_bestblock(self, header, peer): if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target: raise p2p.PeerMisbehavingError('received block header fails PoW test') handle_header(header) @deferral.retry('Error submitting primary block: (will retry)', 10, 10) def submit_block_p2p(block): if factory.conn.value is None: print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))) raise deferral.RetrySilentlyException() factory.conn.value.send_block(block=block) @deferral.retry('Error submitting block: (will retry)', 10, 10) @defer.inlineCallbacks def submit_block_rpc(block, ignore_failure): if bitcoind_work.value['use_getblocktemplate']: result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex')) success = result is None else: result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex')) success = result success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target if (not success and success_expected and not ignore_failure) or (success and not success_expected): print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected) def submit_block(block, ignore_failure): submit_block_p2p(block) submit_block_rpc(block, ignore_failure) @tracker.verified.added.watch def _(share): if share.pow_hash <= share.header['bits'].target: submit_block(share.as_block(tracker), ignore_failure=True) print print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) print def spread(): if (get_height_rel_highest(share.header['previous_block']) > -5 or bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]): broadcast_share(share.hash) spread() reactor.callLater(5, spread) # so get_height_rel_highest can update print 'Joining p2pool network using port %i...' 
% (args.p2pool_port,) @defer.inlineCallbacks def parse(x): if ':' in x: ip, port = x.split(':') defer.returnValue(((yield reactor.resolve(ip)), int(port))) else: defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >>sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() p2p_node = Node( best_share_hash_func=lambda: best_share_var.value, port=args.p2pool_port, net=net, addr_store=addrs, connect_addrs=connect_addrs, max_incoming_conns=args.p2pool_conns, traffic_happened=traffic_happened, ) p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(p2p_node.addr_store.items())) task.LoopingCall(save_addrs).start(60) @best_block_header.changed.watch def _(header): for peer in p2p_node.peers.itervalues(): peer.send_bestblock(header=header) @defer.inlineCallbacks def broadcast_share(share_hash): shares = [] for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))): if share.hash in shared_share_hashes: break shared_share_hashes.add(share.hash) shares.append(share) for peer in list(p2p_node.peers.itervalues()): yield peer.sendShares([share for share in shares if share.peer is not peer]) # send share when the chain changes to their chain best_share_var.changed.watch(broadcast_share) def save_shares(): for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in tracker.verified.items: ss.add_verified_hash(share.hash) task.LoopingCall(save_shares).start(60) @apply @defer.inlineCallbacks def download_shares(): while True: desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0) peer2, share_hash = random.choice(desired) if len(p2p_node.peers) == 0: yield deferral.sleep(1) continue peer = random.choice(p2p_node.peers.values()) print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr) try: shares = yield peer.get_shares( hashes=[share_hash], parents=500, stops=[], ) except: log.err(None, 'in download_shares:') continue if not shares: yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has continue p2p_node.handle_shares(shares, peer) print ' ...success!' print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1/120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' 
% (worker_endpoint[0], worker_endpoint[1]) get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net) wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var) web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened) worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/')) deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],) if args.donation_percentage > 0.51: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,) elif args.donation_percentage < 0.49: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack()) )) signal.siginterrupt(signal.SIGALRM, False) task.LoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100),) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60: yield deferral.sleep(random.expovariate(1/60)) message = '\x02%s BLOCK FOUND by %s! 
%s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = tracker.verified.added.watch(new_share) self.recent_messages = [] def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory()) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(3) try: height = tracker.get_height(best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(tracker.verified.items), len(tracker.items), len(p2p_node.peers), sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work']/dt for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)), get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100*stale_prop, math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value): print >>sys.stderr, '#'*40 print >>sys.stderr, '>>> Warning: ' + warning print >>sys.stderr, '#'*40 if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
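# A hedged launch sketch: main() above yields deferreds, so it runs under Twisted's
# reactor rather than being called directly; args, net, datadir_path, merged_urls and
# worker_endpoint are placeholders for whatever the real entry point parses, and wrapping
# with defer.inlineCallbacks is an assumption about how the generator is meant to be driven.
#
#     run = defer.inlineCallbacks(main)
#     reactor.callWhenRunning(run, args, net, datadir_path, merged_urls, worker_endpoint)
#     reactor.run()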
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__, ) print traffic_happened = variable.Event() @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % ( args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) yield factory.getProtocol() # waits until handshake is successful print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % ( url, args.bitcoind_rpc_username) bitcoind = jsonrpc.Proxy( url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) @deferral.retry('Error while checking Bitcoin connection:', 1) @defer.inlineCallbacks def check(): if not (yield net.PARENT.RPC_CHECK(bitcoind)): print >> sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!" raise deferral.RetrySilentlyException() if not net.VERSION_CHECK( (yield bitcoind.rpc_getinfo())['version']): print >> sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!' raise deferral.RetrySilentlyException() yield check() temp_work = yield getwork(bitcoind) if not args.testnet: factory = yield connect_p2p() block_height_var = variable.Variable(None) @defer.inlineCallbacks def poll_height(): block_height_var.set( (yield deferral.retry('Error while calling getblockcount:')( bitcoind.rpc_getblockcount)())) yield poll_height() task.LoopingCall(poll_height).start(60 * 60) bitcoind_warning_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): errors = (yield deferral.retry('Error while calling getmininginfo:')( bitcoind.rpc_getmininginfo)())['errors'] bitcoind_warning_var.set(errors if errors != '' else None) yield poll_warnings() task.LoopingCall(poll_warnings).start(20 * 60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'], ) print ' Current block height: %i' % (block_height_var.value, ) print print 'Determining payout address...' if args.pubkey_hash is None: address_path = os.path.join(datadir_path, 'cached_payout_address') if os.path.exists(address_path): with open(address_path, 'rb') as f: address = f.read().strip('\r\n') print ' Loaded cached address: %s...' % (address, ) else: address = None if address is not None: res = yield deferral.retry( 'Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))() if not res['isvalid'] or not res['ismine']: print ' Cached address is either invalid or not controlled by local bitcoind!' address = None if address is None: print ' Getting payout address from bitcoind...' address = yield deferral.retry( 'Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))() with open(address_path, 'wb') as f: f.write(address) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash( address, net.PARENT) else: my_pubkey_hash = args.pubkey_hash print ' ...success! 
Payout address:', bitcoin_data.pubkey_hash_to_address( my_pubkey_hash, net.PARENT) print my_share_hashes = set() my_doa_share_hashes = set() tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes) shared_share_hashes = set() ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net) known_verified = set() print "Loading shares..." for i, (mode, contents) in enumerate(ss.get_shares()): if mode == 'share': if contents.hash in tracker.items: continue shared_share_hashes.add(contents.hash) contents.time_seen = 0 tracker.add(contents) if len(tracker.items) % 1000 == 0 and tracker.items: print " %i" % (len(tracker.items), ) elif mode == 'verified_hash': known_verified.add(contents) else: raise AssertionError() print " ...inserting %i verified shares..." % ( len(known_verified), ) for h in known_verified: if h not in tracker.items: ss.forget_verified_share(h) continue tracker.verified.add(tracker.items[h]) print " ...done loading %i shares!" % (len(tracker.items), ) print tracker.removed.watch(lambda share: ss.forget_share(share.hash)) tracker.verified.removed.watch( lambda share: ss.forget_verified_share(share.hash)) tracker.removed.watch( lambda share: shared_share_hashes.discard(share.hash)) print 'Initializing work...' # BITCOIND WORK bitcoind_work = variable.Variable((yield getwork(bitcoind))) @defer.inlineCallbacks def work_poller(): while True: flag = factory.new_block.get_deferred() try: bitcoind_work.set( (yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate']))) except: log.err() yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True) work_poller() # PEER WORK best_block_header = variable.Variable(None) def handle_header(new_header): # check that header matches current target if not (net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target): return bitcoind_best_block = bitcoind_work.value['previous_block'] if (best_block_header.value is None or (new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256( bitcoin_data.block_header_type.pack( best_block_header.value)) == bitcoind_best_block ) # new is child of current and previous is current or (bitcoin_data.hash256( bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and best_block_header.value['previous_block'] != bitcoind_best_block) ): # new is current and previous is not a child of current best_block_header.set(new_header) @defer.inlineCallbacks def poll_header(): handle_header((yield factory.conn.value.get_block_header( bitcoind_work.value['previous_block']))) bitcoind_work.changed.watch(lambda _: poll_header()) yield deferral.retry('Error while requesting best block header:')( poll_header)() # BEST SHARE get_height_rel_highest = yield height_tracker.get_height_rel_highest_func( bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net) best_share_var = variable.Variable(None) desired_var = variable.Variable(None) def set_best_share(): best, desired = tracker.think( get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits']) best_share_var.set(best) desired_var.set(desired) bitcoind_work.changed.watch(lambda _: set_best_share()) set_best_share() print ' ...success!' print # setup p2p logic and join p2pool network class Node(p2p.Node): def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' 
                new_count = 0
                for share in shares:
                    if share.hash in tracker.items:
                        #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),)
                        continue
                    
                    new_count += 1
                    
                    #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                    
                    tracker.add(share)
                
                if new_count:
                    set_best_share()
                
                if len(shares) > 5:
                    print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH)
            
            @defer.inlineCallbacks
            def handle_share_hashes(self, hashes, peer):
                new_hashes = [x for x in hashes if x not in tracker.items]
                if not new_hashes:
                    return
                try:
                    shares = yield peer.get_shares(
                        hashes=new_hashes,
                        parents=0,
                        stops=[],
                    )
                except:
                    log.err(None, 'in handle_share_hashes:')
                else:
                    self.handle_shares(shares, peer)
            
            def handle_get_shares(self, hashes, parents, stops, peer):
                parents = min(parents, 1000//len(hashes))
                stops = set(stops)
                shares = []
                for share_hash in hashes:
                    for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))):
                        if share.hash in stops:
                            break
                        shares.append(share)
                print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1])
                return shares
            
            def handle_bestblock(self, header, peer):
                if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target:
                    raise p2p.PeerMisbehavingError('received block header fails PoW test')
                handle_header(header)
        
        @deferral.retry('Error submitting primary block: (will retry)', 10, 10)
        def submit_block_p2p(block):
            if factory.conn.value is None:
                print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header'])))
                raise deferral.RetrySilentlyException()
            factory.conn.value.send_block(block=block)
        
        @deferral.retry('Error submitting block: (will retry)', 10, 10)
        @defer.inlineCallbacks
        def submit_block_rpc(block, ignore_failure):
            if bitcoind_work.value['use_getblocktemplate']:
                result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex'))
                success = result is None
            else:
                result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex'))
                success = result
            success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target
            if (not success and success_expected and not ignore_failure) or (success and not success_expected):
                print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected)
        
        def submit_block(block, ignore_failure):
            submit_block_p2p(block)
            submit_block_rpc(block, ignore_failure)
        
        @tracker.verified.added.watch
        def _(share):
            if share.pow_hash <= share.header['bits'].target:
                submit_block(share.as_block(tracker), ignore_failure=True)
                print
                print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                print
                def spread():
                    if (get_height_rel_highest(share.header['previous_block']) > -5 or
                        bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]):
                        broadcast_share(share.hash)
                spread()
                reactor.callLater(5, spread) # so get_height_rel_highest can update
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        p2p_node = Node(
            best_share_hash_func=lambda: best_share_var.value,
            port=args.p2pool_port,
            net=net,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            max_incoming_conns=args.p2pool_conns,
            traffic_happened=traffic_happened,
        )
        p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(p2p_node.addr_store.items()))
        task.LoopingCall(save_addrs).start(60)
        
        @best_block_header.changed.watch
        def _(header):
            for peer in p2p_node.peers.itervalues():
                peer.send_bestblock(header=header)
        
        @defer.inlineCallbacks
        def broadcast_share(share_hash):
            shares = []
            for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))):
                if share.hash in shared_share_hashes:
                    break
                shared_share_hashes.add(share.hash)
                shares.append(share)
            
            for peer in list(p2p_node.peers.itervalues()):
                yield peer.sendShares([share for share in shares if share.peer is not peer])
        
        # send share when the chain changes to their chain
        best_share_var.changed.watch(broadcast_share)
        
        def save_shares():
            for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        @apply
        @defer.inlineCallbacks
        def download_shares():
            while True:
                desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
                peer2, share_hash = random.choice(desired)
                
                if len(p2p_node.peers) == 0:
                    yield deferral.sleep(1)
                    continue
                peer = random.choice(p2p_node.peers.values())
                
                print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
                try:
                    shares = yield peer.get_shares(
                        hashes=[share_hash],
                        parents=500,
                        stops=[],
                    )
                except:
                    log.err(None, 'in download_shares:')
                    continue
                
                if not shares:
                    yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
                    continue
                p2p_node.handle_shares(shares, peer)
        
        print '    ...success!'
        print
        
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net)
        
        wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var)
        web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened)
        worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
        
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0])
        
        with open(os.path.join(datadir_path, 'ready_flag'), 'wb') as f:
            pass
        
        print '    ...success!'
        print
        
        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 0.51:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < 0.49:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print
        
        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = tracker.get_height(best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(tracker.verified.items),
                        len(tracker.items),
                        len(p2p_node.peers),
                        sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s),
                        )
                        
                        for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value):
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> Warning: ' + warning
                            print >>sys.stderr, '#'*40
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
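# --- Hedged usage sketch (illustrative, not part of the original file) ---
# A minimal sketch of how the main() coroutine above could be driven by the
# Twisted reactor, assuming argument parsing elsewhere has already produced
# args, net, datadir_path, merged_urls and worker_endpoint. The run_p2pool()
# name is an assumption made for illustration only; the real entry point
# builds these values itself before scheduling main(). The imports mirror
# the twisted modules this file already relies on.
from twisted.internet import reactor
from twisted.python import log

def run_p2pool(args, net, datadir_path, merged_urls, worker_endpoint):
    # main() is wrapped in @defer.inlineCallbacks, so calling it returns a
    # Deferred; scheduling it with callWhenRunning lets the reactor drive it,
    # and addErrback(log.err) reports anything that escapes the coroutine.
    reactor.callWhenRunning(lambda: main(args, net, datadir_path, merged_urls, worker_endpoint).addErrback(log.err))
    reactor.run()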