# NOTE: as in the p2pool source tree, this module assumes Python 2 with
# `from __future__ import division` in effect, and relies on module-level
# imports (json, os, sys, time, traceback; twisted's defer, resource, static,
# log; p2pool, bitcoin_data, p2pool_data, deferral, deferred_resource, graph,
# math, memory, pack, variable) plus the _atomic_read/_atomic_write helpers
# defined elsewhere in this file.
def get_web_root(wb, datadir_path, bitcoind_warning_var, stop_event=variable.Event()):
    node = wb.node
    start_time = time.time()
    
    web_root = resource.Resource()
    
    def get_users():
        height, last = node.tracker.get_height_and_last(node.best_share_var.value)
        weights, total_weight, donation_weight = node.tracker.get_cumulative_weights(node.best_share_var.value, min(height, 720), 65535*2**256)
        res = {}
        for script in sorted(weights, key=lambda s: weights[s]):
            res[bitcoin_data.script2_to_address(script, node.net.PARENT)] = weights[script]/total_weight
        return res
    
    def get_current_scaled_txouts(scale, trunc=0):
        txouts = node.get_current_txouts()
        total = sum(txouts.itervalues())
        results = dict((script, value*scale//total) for script, value in txouts.iteritems())
        if trunc > 0:
            total_random = 0
            random_set = set()
            for s in sorted(results, key=results.__getitem__):
                if results[s] >= trunc:
                    break
                total_random += results[s]
                random_set.add(s)
            if total_random:
                winner = math.weighted_choice((script, results[script]) for script in random_set)
                for script in random_set:
                    del results[script]
                results[winner] = total_random
        if sum(results.itervalues()) < int(scale):
            results[math.weighted_choice(results.iteritems())] += int(scale) - sum(results.itervalues())
        return results
    
    def get_patron_sendmany(total=None, trunc='0.01'):
        if total is None:
            return 'need total argument. go to patron_sendmany/<TOTAL>'
        total = int(float(total)*1e8)
        trunc = int(float(trunc)*1e8)
        return json.dumps(dict(
            (bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e6)
            for script, value in get_current_scaled_txouts(total, trunc).iteritems()
            if bitcoin_data.script2_to_address(script, node.net.PARENT) is not None
        ))
    
    def get_local_rates():
        miner_hash_rates = {}
        miner_dead_hash_rates = {}
        datums, dt = wb.local_rate_monitor.get_datums_in_last()
        for datum in datums:
            miner_hash_rates[datum['user']] = miner_hash_rates.get(datum['user'], 0) + datum['work']/dt
            if datum['dead']:
                miner_dead_hash_rates[datum['user']] = miner_dead_hash_rates.get(datum['user'], 0) + datum['work']/dt
        return miner_hash_rates, miner_dead_hash_rates
    
    def get_global_stats(): # averaged over last hour
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        nonstale_hash_rate = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)
        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        return dict(
            pool_nonstale_hash_rate=nonstale_hash_rate,
            pool_hash_rate=nonstale_hash_rate/(1 - stale_prop),
            pool_stale_prop=stale_prop,
            min_difficulty=bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target),
        )
    
    def get_local_stats():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.tracker.get_height(node.best_share_var.value), 3600//node.net.SHARE_PERIOD)
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        
        my_unstale_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes)
        my_orphan_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'orphan')
        my_doa_count = sum(1 for share in node.tracker.get_chain(node.best_share_var.value, lookbehind) if share.hash in wb.my_share_hashes and share.share_data['stale_info'] == 'doa')
        my_share_count = my_unstale_count + my_orphan_count + my_doa_count
        my_stale_count = my_orphan_count + my_doa_count
        
        my_stale_prop = my_stale_count/my_share_count if my_share_count != 0 else None
        
        my_work = sum(bitcoin_data.target_to_average_attempts(share.target)
            for share in node.tracker.get_chain(node.best_share_var.value, lookbehind - 1)
            if share.hash in wb.my_share_hashes)
        actual_time = (node.tracker.items[node.best_share_var.value].timestamp -
            node.tracker.items[node.tracker.get_nth_parent_hash(node.best_share_var.value, lookbehind - 1)].timestamp)
        share_att_s = my_work / actual_time
        
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        
        return dict(
            my_hash_rates_in_last_hour=dict(
                note="DEPRECATED",
                nonstale=share_att_s,
                rewarded=share_att_s/(1 - global_stale_prop),
                actual=share_att_s/(1 - my_stale_prop) if my_stale_prop is not None else 0, # 0 because we don't have any shares anyway
            ),
            my_share_counts_in_last_hour=dict(
                shares=my_share_count,
                unstale_shares=my_unstale_count,
                stale_shares=my_stale_count,
                orphan_stale_shares=my_orphan_count,
                doa_stale_shares=my_doa_count,
            ),
            my_stale_proportions_in_last_hour=dict(
                stale=my_stale_prop,
                orphan_stale=my_orphan_count/my_share_count if my_share_count != 0 else None,
                dead_stale=my_doa_count/my_share_count if my_share_count != 0 else None,
            ),
            miner_hash_rates=miner_hash_rates,
            miner_dead_hash_rates=miner_dead_hash_rates,
            efficiency_if_miner_perfect=(1 - stale_orphan_shares/shares)/(1 - global_stale_prop) if shares else None, # ignores dead shares because those are miner's fault and indicated by pseudoshare rejection
            efficiency=(1 - (stale_orphan_shares+stale_doa_shares)/shares)/(1 - global_stale_prop) if shares else None,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            shares=dict(
                total=shares,
                orphan=stale_orphan_shares,
                dead=stale_doa_shares,
            ),
            uptime=time.time() - start_time,
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-6,
            warnings=p2pool_data.get_warnings(node.tracker, node.best_share_var.value, node.net, bitcoind_warning_var.value, node.bitcoind_work.value),
            donation_proportion=wb.donation_percentage/100,
        )
    
    class WebInterface(deferred_resource.DeferredResource):
        def __init__(self, func, mime_type='application/json', args=()):
            deferred_resource.DeferredResource.__init__(self)
            self.func, self.mime_type, self.args = func, mime_type, args
        
        def getChild(self, child, request):
            # each extra URL path segment becomes another positional argument for func
            return WebInterface(self.func, self.mime_type, self.args + (child,))
        
        @defer.inlineCallbacks
        def render_GET(self, request):
            request.setHeader('Content-Type', self.mime_type)
            request.setHeader('Access-Control-Allow-Origin', '*')
            res = yield self.func(*self.args)
            defer.returnValue(json.dumps(res) if self.mime_type == 'application/json' else res)
    
    def decent_height():
        return min(node.tracker.get_height(node.best_share_var.value), 720)
    web_root.putChild('rate', WebInterface(lambda: p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, decent_height())/(1-p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, decent_height()))))
    web_root.putChild('difficulty', WebInterface(lambda: bitcoin_data.target_to_difficulty(node.tracker.items[node.best_share_var.value].max_target)))
    web_root.putChild('users', WebInterface(get_users))
    web_root.putChild('user_stales', WebInterface(lambda: dict((bitcoin_data.script2_to_address(ph, node.net.PARENT), prop) for ph, prop in
        p2pool_data.get_user_stale_props(node.tracker, node.best_share_var.value, node.tracker.get_height(node.best_share_var.value)).iteritems())))
    web_root.putChild('fee', WebInterface(lambda: wb.worker_fee))
    web_root.putChild('current_payouts', WebInterface(lambda: dict((bitcoin_data.script2_to_address(script, node.net.PARENT), value/1e6) for script, value in node.get_current_txouts().iteritems())))
    web_root.putChild('patron_sendmany', WebInterface(get_patron_sendmany, 'text/plain'))
    web_root.putChild('global_stats', WebInterface(get_global_stats))
    web_root.putChild('local_stats', WebInterface(get_local_stats))
    web_root.putChild('peer_addresses', WebInterface(lambda: ['%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port) for peer in node.p2p_node.peers.itervalues()]))
    web_root.putChild('peer_txpool_sizes', WebInterface(lambda: dict(('%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port), peer.remembered_txs_size) for peer in node.p2p_node.peers.itervalues())))
    # ping each peer three times and report the fastest round trip in milliseconds
    web_root.putChild('pings', WebInterface(defer.inlineCallbacks(lambda: defer.returnValue(
        dict([(a, (yield b)) for a, b in
            [(
                '%s:%i' % (peer.transport.getPeer().host, peer.transport.getPeer().port),
                defer.inlineCallbacks(lambda peer=peer: defer.returnValue(
                    min([(yield peer.do_ping().addCallback(lambda x: x/0.001).addErrback(lambda fail: None)) for i in xrange(3)])
                ))()
            ) for peer in list(node.p2p_node.peers.itervalues())]
        ])
    ))))
    web_root.putChild('peer_versions', WebInterface(lambda: dict(('%s:%i' % peer.addr, peer.other_sub_version) for peer in node.p2p_node.peers.itervalues())))
    web_root.putChild('payout_addr', WebInterface(lambda: bitcoin_data.pubkey_to_address(wb.my_pubkey, node.net.PARENT)))
    web_root.putChild('recent_blocks', WebInterface(lambda: [dict(
        ts=s.timestamp,
        hash='%064x' % s.header_hash,
        number=pack.IntType(24).unpack(s.share_data['coinbase'][1:4]) if len(s.share_data['coinbase']) >= 4 else None,
        share='%064x' % s.hash,
    ) for s in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 24*60*60//node.net.SHARE_PERIOD)) if s.pow_hash <= s.header['bits'].target]))
    web_root.putChild('uptime', WebInterface(lambda: time.time() - start_time))
    web_root.putChild('stale_rates', WebInterface(lambda: p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, decent_height(), rates=True)))
    
    new_root = resource.Resource()
    web_root.putChild('web', new_root)
    
    stat_log = []
    if os.path.exists(os.path.join(datadir_path, 'stats')):
        try:
            with open(os.path.join(datadir_path, 'stats'), 'rb') as f:
                stat_log = json.loads(f.read())
        except:
            log.err(None, 'Error loading stats:')
    def update_stat_log():
        # keep at most 24 hours of five-minute snapshots
        while stat_log and stat_log[0]['time'] < time.time() - 24*60*60:
            stat_log.pop(0)
        
        lookbehind = 3600//node.net.SHARE_PERIOD
        if node.tracker.get_height(node.best_share_var.value) < lookbehind:
            return None
        
        global_stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, lookbehind)
        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        
        stat_log.append(dict(
            time=time.time(),
            pool_hash_rate=p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, lookbehind)/(1-global_stale_prop),
            pool_stale_prop=global_stale_prop,
            local_hash_rates=miner_hash_rates,
            local_dead_hash_rates=miner_dead_hash_rates,
            shares=shares,
            stale_shares=stale_orphan_shares + stale_doa_shares,
            stale_shares_breakdown=dict(orphan=stale_orphan_shares, doa=stale_doa_shares),
            current_payout=node.get_current_txouts().get(bitcoin_data.pubkey_to_script2(wb.my_pubkey), 0)*1e-6,
            peers=dict(
                incoming=sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                outgoing=sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming),
            ),
            attempts_to_share=bitcoin_data.target_to_average_attempts(node.tracker.items[node.best_share_var.value].max_target),
            attempts_to_block=bitcoin_data.target_to_average_attempts(node.bitcoind_work.value['bits'].target),
            block_value=node.bitcoind_work.value['subsidy']*1e-6,
        ))
        
        with open(os.path.join(datadir_path, 'stats'), 'wb') as f:
            f.write(json.dumps(stat_log))
    x = deferral.RobustLoopingCall(update_stat_log)
    x.start(5*60)
    stop_event.watch(x.stop)
    new_root.putChild('log', WebInterface(lambda: stat_log))
    
    def get_share(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return None
        share = node.tracker.items[int(share_hash_str, 16)]
        
        return dict(
            parent='%064x' % share.previous_hash,
            children=['%064x' % x for x in sorted(node.tracker.reverse.get(share.hash, set()), key=lambda sh: -len(node.tracker.reverse.get(sh, set())))], # sorted from most children to least children
            type_name=type(share).__name__,
            local=dict(
                verified=share.hash in node.tracker.verified.items,
                time_first_seen=start_time if share.time_seen == 0 else share.time_seen,
                peer_first_received_from=share.peer_addr,
            ),
            share_data=dict(
                timestamp=share.timestamp,
                target=share.target,
                max_target=share.max_target,
                payout_address=bitcoin_data.script2_to_address(share.new_script, node.net.PARENT),
                donation=share.share_data['donation']/65535,
                stale_info=share.share_data['stale_info'],
                nonce=share.share_data['nonce'],
                desired_version=share.share_data['desired_version'],
            ),
            block=dict(
                hash='%064x' % share.header_hash,
                header=dict(
                    version=share.header['version'],
                    previous_block='%064x' % share.header['previous_block'],
                    merkle_root='%064x' % share.header['merkle_root'],
                    timestamp=share.header['timestamp'],
                    target=share.header['bits'].target,
                    nonce=share.header['nonce'],
                ),
                gentx=dict(
                    hash='%064x' % share.gentx_hash,
                    coinbase=share.share_data['coinbase'].ljust(2, '\x00').encode('hex'),
                    value=share.share_data['subsidy']*1e-6,
                ),
                txn_count_range=[len(share.other_txs), len(share.other_txs)] if share.other_txs is not None else
                    1 if len(share.merkle_link['branch']) == 0 else
                    [2**len(share.merkle_link['branch'])//2+1, 2**len(share.merkle_link['branch'])],
            ),
        )
    new_root.putChild('share', WebInterface(lambda share_hash_str: get_share(share_hash_str)))
    new_root.putChild('heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.heads]))
    new_root.putChild('verified_heads', WebInterface(lambda: ['%064x' % x for x in node.tracker.verified.heads]))
    new_root.putChild('tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.tails for x in node.tracker.reverse.get(t, set())]))
    new_root.putChild('verified_tails', WebInterface(lambda: ['%064x' % x for t in node.tracker.verified.tails for x in node.tracker.verified.reverse.get(t, set())]))
    new_root.putChild('best_share_hash', WebInterface(lambda: '%064x' % node.best_share_var.value))
    def get_share_data(share_hash_str):
        if int(share_hash_str, 16) not in node.tracker.items:
            return ''
        share = node.tracker.items[int(share_hash_str, 16)]
        return p2pool_data.share_type.pack(share.as_share1a())
    new_root.putChild('share_data', WebInterface(lambda share_hash_str: get_share_data(share_hash_str), 'application/octet-stream'))
    new_root.putChild('currency_info', WebInterface(lambda: dict(
        symbol=node.net.PARENT.SYMBOL,
        block_explorer_url_prefix=node.net.PARENT.BLOCK_EXPLORER_URL_PREFIX,
        address_explorer_url_prefix=node.net.PARENT.ADDRESS_EXPLORER_URL_PREFIX,
    )))
    new_root.putChild('version', WebInterface(lambda: p2pool.__version__))
    
    hd_path = os.path.join(datadir_path, 'graph_db')
    hd_data = _atomic_read(hd_path)
    hd_obj = {}
    if hd_data is not None:
        try:
            hd_obj = json.loads(hd_data)
        except Exception:
            log.err(None, 'Error reading graph database:')
    dataview_descriptions = {
        'last_hour': graph.DataViewDescription(150, 60*60),
        'last_day': graph.DataViewDescription(300, 60*60*24),
        'last_week': graph.DataViewDescription(300, 60*60*24*7),
        'last_month': graph.DataViewDescription(300, 60*60*24*30),
        'last_year': graph.DataViewDescription(300, 60*60*24*365.25),
    }
    def build_desired_rates(ds_name, ds_desc, dv_name, dv_desc, obj):
        # reconstructs the 'desired_version_rates' stream from the saved
        # 'pool_rates' and 'desired_versions' streams when it is missing
        if not obj:
            last_bin_end = 0
            bins = dv_desc.bin_count*[{}]
        else:
            pool_rates = obj['pool_rates'][dv_name]
            desired_versions = obj['desired_versions'][dv_name]
            def get_total_pool_rate(t):
                n = int((pool_rates['last_bin_end'] - t)/dv_desc.bin_width)
                if n < 0 or n >= dv_desc.bin_count:
                    return None
                total = sum(x[0] for x in pool_rates['bins'][n].values())
                count = math.mean(x[1] for x in pool_rates['bins'][n].values())
                if count == 0:
                    return None
                return total/count
            last_bin_end = desired_versions['last_bin_end']
            bins = [dict((name, (total*get_total_pool_rate(last_bin_end - (i+1/2)*dv_desc.bin_width), count)) for name, (total, count) in desired_versions['bins'][i].iteritems()) for i in xrange(dv_desc.bin_count)]
        return graph.DataView(dv_desc, ds_desc, last_bin_end, bins)
    hd = graph.HistoryDatabase.from_obj({
        'local_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'local_dead_share_hash_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False),
        'pool_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True),
        'current_payout': graph.DataStreamDescription(dataview_descriptions),
        'current_payouts': graph.DataStreamDescription(dataview_descriptions, multivalues=True),
        'incoming_peers': graph.DataStreamDescription(dataview_descriptions),
        'outgoing_peers': graph.DataStreamDescription(dataview_descriptions),
        'miner_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'miner_dead_hash_rates': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'desired_versions': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True),
        'desired_version_rates': graph.DataStreamDescription(dataview_descriptions, multivalues=True, multivalue_undefined_means_0=True, default_func=build_desired_rates),
        'traffic_rate': graph.DataStreamDescription(dataview_descriptions, is_gauge=False, multivalues=True),
        'getwork_latency': graph.DataStreamDescription(dataview_descriptions),
        'memory_usage': graph.DataStreamDescription(dataview_descriptions),
    }, hd_obj)
    x = deferral.RobustLoopingCall(lambda: _atomic_write(hd_path, json.dumps(hd.to_obj())))
    x.start(100)
    stop_event.watch(x.stop)
    @wb.pseudoshare_received.watch
    def _(work, dead, user):
        t = time.time()
        hd.datastreams['local_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_hash_rate'].add_datum(t, work)
        if user is not None:
            hd.datastreams['miner_hash_rates'].add_datum(t, {user: work})
            if dead:
                hd.datastreams['miner_dead_hash_rates'].add_datum(t, {user: work})
    @wb.share_received.watch
    def _(work, dead):
        t = time.time()
        hd.datastreams['local_share_hash_rate'].add_datum(t, work)
        if dead:
            hd.datastreams['local_dead_share_hash_rate'].add_datum(t, work)
    @node.p2p_node.traffic_happened.watch
    def _(name, bytes):
        hd.datastreams['traffic_rate'].add_datum(time.time(), {name: bytes})
    def add_point():
        if node.tracker.get_height(node.best_share_var.value) < 10:
            return None
        lookbehind = min(node.net.CHAIN_LENGTH, 60*60//node.net.SHARE_PERIOD, node.tracker.get_height(node.best_share_var.value))
        t = time.time()
        
        pool_rates = p2pool_data.get_stale_counts(node.tracker, node.best_share_var.value, lookbehind, rates=True)
        pool_total = sum(pool_rates.itervalues())
        hd.datastreams['pool_rates'].add_datum(t, pool_rates)
        
        current_txouts = node.get_current_txouts()
        hd.datastreams['current_payout'].add_datum(t, current_txouts.get(bitcoin_data.pubkey_to_script2(wb.my_pubkey), 0)*1e-6)
        miner_hash_rates, miner_dead_hash_rates = get_local_rates()
        current_txouts_by_address = dict((bitcoin_data.script2_to_address(script, node.net.PARENT), amount) for script, amount in current_txouts.iteritems())
        hd.datastreams['current_payouts'].add_datum(t, dict((user, current_txouts_by_address[user]*1e-6) for user in miner_hash_rates if user in current_txouts_by_address))
        
        hd.datastreams['incoming_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming))
        hd.datastreams['outgoing_peers'].add_datum(t, sum(1 for peer in node.p2p_node.peers.itervalues() if not peer.incoming))
        
        vs = p2pool_data.get_desired_version_counts(node.tracker, node.best_share_var.value, lookbehind)
        vs_total = sum(vs.itervalues())
        hd.datastreams['desired_versions'].add_datum(t, dict((str(k), v/vs_total) for k, v in vs.iteritems()))
        hd.datastreams['desired_version_rates'].add_datum(t, dict((str(k), v/vs_total*pool_total) for k, v in vs.iteritems()))
        try:
            hd.datastreams['memory_usage'].add_datum(t, memory.resident())
        except:
            if p2pool.DEBUG:
                traceback.print_exc()
    x = deferral.RobustLoopingCall(add_point)
    x.start(5)
    stop_event.watch(x.stop)
    @node.bitcoind_work.changed.watch
    def _(new_work):
        hd.datastreams['getwork_latency'].add_datum(time.time(), new_work['latency'])
    new_root.putChild('graph_data', WebInterface(lambda source, view: hd.datastreams[source].dataviews[view].get_data(time.time())))
    
    web_root.putChild('static', static.File(os.path.join(os.path.dirname(sys.argv[0]), 'web-static')))
    
    return web_root
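# ---------------------------------------------------------------------------
# Usage sketch (illustrative comment, not part of the original module): once
# a node is running, every WebInterface child registered above is an HTTP
# endpoint on the worker port (9332 is the conventional p2pool default; use
# whatever worker_endpoint was actually bound to), e.g.:
#
#   curl http://127.0.0.1:9332/local_stats
#   curl http://127.0.0.1:9332/web/graph_data/local_hash_rate/last_hour
#
# Trailing path segments are accumulated by WebInterface.getChild and passed
# to the wrapped function as positional string arguments, which is how the
# 'graph_data' lambda above receives its (source, view) pair.
# ---------------------------------------------------------------------------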
# NOTE: relies on main.py's module-level imports as in the source tree
# (base64, gc, json, os, random, signal, sys, time, traceback; twisted's
# defer, reactor, protocol, task, server, log; and the p2pool modules
# bitcoin_p2p, bitcoin_data, jsonrpc, helper, deferral, math, variable,
# p2pool_data, p2pool_node, work, web, worker_interface, stratum,
# switchprotocol, ipdiscover, portmapper).
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print
        
        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            yield factory.getProtocol() # waits until handshake is successful
            print '    ...success!'
            print
            defer.returnValue(factory)
        
        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()
        
        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)
        
        bitcoind_warning_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors']
            bitcoind_warning_var.set(errors if errors != '' else None)
        yield poll_warnings()
        task.LoopingCall(poll_warnings).start(20*60)
        
        print '    ...success!'
        print '    Current block hash: %x' % (temp_work['previous_block'],)
        print '    Current block height: %i' % (temp_work['height'] - 1,)
        print
        
        if not args.testnet:
            factory = yield connect_p2p()
        
        print 'Determining payout address...'
        pubkey_path = os.path.join(datadir_path, 'cached_payout_pubkey')
        
        if os.path.exists(pubkey_path):
            with open(pubkey_path, 'rb') as f:
                pubkey = f.read().strip('\r\n')
            print '    Loaded cached pubkey, payout address: %s...' % (bitcoin_data.pubkey_to_address(pubkey.decode('hex'), net.PARENT),)
        else:
            pubkey = None
        
        if pubkey is not None:
            res = yield deferral.retry('Error validating cached pubkey:', 5)(lambda: bitcoind.rpc_validatepubkey(pubkey))()
            if not res['isvalid'] or not res['ismine']:
                print '    Cached pubkey is either invalid or not controlled by local bitcoind!'
                pubkey = None # discard it so a fresh pubkey is fetched below
        
        if pubkey is None:
            print '    Getting payout pubkey from bitcoind...'
            pubkey = yield deferral.retry('Error getting payout pubkey from bitcoind:', 5)(lambda: bitcoind.rpc_getnewpubkey('p2pool'))()
            with open(pubkey_path, 'wb') as f:
                f.write(pubkey)
        
        my_pubkey = pubkey.decode('hex')
        address = bitcoin_data.pubkey_to_address(my_pubkey, net.PARENT)
        my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        
        print '    ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print
        
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net)
        shares = {}
        known_verified = set()
        print "Loading shares..."
        for i, (mode, contents) in enumerate(ss.get_shares()):
            if mode == 'share':
                contents.time_seen = 0
                shares[contents.hash] = contents
                if len(shares) % 1000 == 0 and shares:
                    print "    %i" % (len(shares),)
            elif mode == 'verified_hash':
                known_verified.add(contents)
            else:
                raise AssertionError()
        print "    ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print
        
        print 'Initializing work...'
        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()
        
        # drop stored shares that didn't make it into the tracker, then keep
        # the share store in sync with the tracker from here on
        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        del shares, known_verified
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))
        
        def save_shares():
            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        task.LoopingCall(save_shares).start(60)
        
        print '    ...success!'
        print
        
        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)
        
        @defer.inlineCallbacks
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                defer.returnValue(((yield reactor.resolve(ip)), int(port)))
            else:
                defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT))
        
        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()
        
        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()
        
        node.p2p_node = p2pool_node.P2PNode(node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
        )
        node.p2p_node.start()
        
        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))
        task.LoopingCall(save_addrs).start(60)
        
        print '    ...success!'
        print
        
        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])
        
        wb = work.WorkerBridge(node, my_pubkey, args.donation_percentage, merged_urls, args.worker_fee)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_warning_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/'))
        web_serverfactory = server.Site(web_root)
        
        # connections whose first byte is '{' speak stratum; everything else is HTTP
        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])
        
        with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
            pass
        
        print '    ...success!'
        print
        
        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print
        
        if hasattr(signal, 'SIGALRM'):
            # watchdog: dump a stack trace if the reactor stalls for 30 seconds
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            task.LoopingCall(signal.alarm, 30).start(1)
        
        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = node.tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def joined(self, channel):
                    self.in_channel = True
                def left(self, channel):
                    self.in_channel = False
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory())
        
        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    
                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(2**256 / node.tracker.items[node.best_share_var.value].max_target / my_att_s) if my_att_s and node.best_share_var.value else '???',
                    )
                    
                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                        
                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            node.get_current_txouts().get(bitcoin_data.pubkey_to_script2(my_pubkey), 0)*1e-6, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                        )
                        
                        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_warning_var.value, node.bitcoind_work.value):
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> Warning: ' + warning
                            print >>sys.stderr, '#'*40
                        
                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
                    
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
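# Driver sketch (illustrative comment; the project's real entry point parses
# these arguments elsewhere and is not shown in this excerpt). It only
# demonstrates that main() is an inlineCallbacks coroutine meant to be
# started under the Twisted reactor:
#
#   from twisted.internet import reactor
#   reactor.callWhenRunning(main, args, net, datadir_path, merged_urls, worker_endpoint)
#   reactor.run()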