@defer.inlineCallbacks
def download_shares():
    while True:
        desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0)
        peer2, share_hash = random.choice(desired)
        
        if len(p2p_node.peers) == 0:
            yield deferral.sleep(1)
            continue
        peer = random.choice(p2p_node.peers.values())
        
        print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr)
        try:
            shares = yield peer.get_shares(
                hashes=[share_hash],
                parents=500,
                stops=[],
            )
        except:
            log.err(None, 'in download_shares:')
            continue
        
        if not shares:
            yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has
            continue
        p2p_node.handle_shares(shares, peer)
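# These loops all follow the same Twisted pattern: a @defer.inlineCallbacks
# generator that yields deferreds back to the reactor. A minimal sketch of the
# deferral.sleep helper they rely on (p2pool's util/deferral.py ships the real
# version; this is just to show the shape):
from twisted.internet import defer, reactor

def sleep(t):
    # returns a Deferred that fires with None after t seconds
    d = defer.Deferred()
    reactor.callLater(t, d.callback, None)
    return d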
@defer.inlineCallbacks
def new_share(share):
    if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
        yield deferral.sleep(random.expovariate(1/60)) # stagger announcements; 1/60 relies on "from __future__ import division"
        # '\x02' is the IRC bold control code
        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
        if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
            self.say(self.channel, message)
            self._remember_message(message)
@defer.inlineCallbacks
def work2_thread():
    while True:
        try:
            set_real_work2()
        except:
            log.err()
        yield deferral.sleep(random.expovariate(1/20))
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass, merged_address):
    merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
    while self.running:
        if merged_address:
            auxblock = yield deferral.retry('Error while calling merged createauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_createauxblock)(merged_address)
        else:
            auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
        target = auxblock['target'] if 'target' in auxblock else auxblock['_target']
        self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
            hash=int(auxblock['hash'], 16),
            target='p2pool' if target == 'p2pool' else pack.IntType(256).unpack(target.decode('hex')),
            merged_proxy=merged_proxy,
            merged_address=merged_address,
        )}))
        yield deferral.sleep(1)
@defer.inlineCallbacks
def work_poller():
    while True:
        flag = factory.new_block.get_deferred()
        try:
            bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate'])))
        except:
            log.err()
        yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
@defer.inlineCallbacks
def work1_thread():
    while True:
        flag = work_updated.get_deferred()
        try:
            yield set_real_work1()
        except:
            log.err()
        yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
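# work_poller and work1_thread both race an event against a timer:
# DeferredList with fireOnOneCallback=True resolves as soon as either
# deferred fires. A self-contained sketch of that wait-for-either idiom
# (wake_up is a hypothetical event deferred, not from the original code):
from twisted.internet import defer, reactor, task

@defer.inlineCallbacks
def wait_for_either(wake_up, timeout):
    # resumes on the event or after `timeout` seconds, whichever fires first
    yield defer.DeferredList([wake_up, task.deferLater(reactor, timeout, lambda: None)], fireOnOneCallback=True)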
@defer.inlineCallbacks
def status_thread():
    last_str = None
    last_time = 0
    while True:
        yield deferral.sleep(3)
        try:
            height = node.tracker.get_height(node.best_share_var.value)
            this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                height,
                len(node.tracker.verified.items),
                len(node.tracker.items),
                len(node.p2p_node.peers),
                sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
            
            datums, dt = wb.local_rate_monitor.get_datums_in_last()
            my_att_s = sum(datum['work']/dt for datum in datums)
            my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
            this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                math.format(int(my_att_s)),
                math.format_dt(dt),
                math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
            )
            
            if height > 2:
                (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                
                this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                    shares, stale_orphan_shares, stale_doa_shares,
                    math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                    math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                    node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                )
                print(node.bitcoind_work.value['bits'])
                print(real_att_s)
                this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                    math.format(int(real_att_s)),
                    100*stale_prop,
                    math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                )
                
                for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
                    print >>sys.stderr, '#'*40
                    print >>sys.stderr, '>>> Warning: ' + warning
                    print >>sys.stderr, '#'*40
                
                if gc.garbage:
                    print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
            
            if this_str != last_str or time.time() > last_time + 15:
                print this_str
                last_str = this_str
                last_time = time.time()
        except:
            log.err()
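# Worked example (made-up numbers) of the expected-time-to-block estimate
# above: expected seconds = 2**256 / block_target / pool_hashrate.
block_target = 0x00000000ffff0000000000000000000000000000000000000000000000000000 # difficulty-1 target
pool_hashrate = 1e9 # 1 GH/s, hypothetical
print 2**256 / float(block_target) / pool_hashrate # ~4.3 s: difficulty 1 takes ~2**32 hashes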
@defer.inlineCallbacks
def status_thread():
    last_str = None
    last_time = 0
    while True:
        yield deferral.sleep(3)
        try:
            height = node.tracker.get_height(node.best_share_var.value)
            this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                height,
                len(node.tracker.verified.items),
                len(node.tracker.items),
                len(node.p2p_node.peers),
                sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
            ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
            
            datums, dt = wb.local_rate_monitor.get_datums_in_last()
            my_att_s = sum(datum['work']/dt for datum in datums)
            my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
            this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                math.format(int(my_att_s)),
                math.format_dt(dt),
                math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
            )
            
            if height > 2:
                (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)
                
                this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                    shares, stale_orphan_shares, stale_doa_shares,
                    math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                    math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                    ### CJWinty: dimecoin has only 5 digits
                    node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-5, net.PARENT.SYMBOL,
                )
                this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                    math.format(int(real_att_s)),
                    100*stale_prop,
                    math.format_dt(2**256 / node.dimecoind_work.value['bits'].target / real_att_s),
                )
                
                for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, dimecoind_warning_var.value, node.dimecoind_work.value):
                    print >>sys.stderr, '#'*40
                    print >>sys.stderr, '>>> Warning: ' + warning
                    print >>sys.stderr, '#'*40
                
                if gc.garbage:
                    print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))
            
            if this_str != last_str or time.time() > last_time + 15:
                print this_str
                last_str = this_str
                last_time = time.time()
        except:
            log.err()
@defer.inlineCallbacks
def new_share(share):
    if not self.in_channel:
        return
    if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
        yield deferral.sleep(random.expovariate(1/60))
        message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
        if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
            self.say(self.channel, message)
            self._remember_message(message)
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
    merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
    while self.running:
        auxblock = yield deferral.retry('Error while calling merged getauxblock on %s:' % (merged_url,), 30)(merged_proxy.rpc_getauxblock)()
        self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
            hash=int(auxblock['hash'], 16),
            # the original had this condition inverted (!=), which would have replaced every real target with the string 'p2pool'
            target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
            merged_proxy=merged_proxy,
        )}))
        yield deferral.sleep(1)
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
    merged_proxy = jsonrpc.HTTPProxy(merged_url, dict(Authorization='Basic ' + base64.b64encode(merged_userpass)))
    while self.running:
        auxblock = yield deferral.retry('Error while calling merged getauxblock:', 30)(merged_proxy.rpc_getauxblock)()
        self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock['chainid']: dict(
            hash=int(auxblock['hash'], 16),
            target='p2pool' if auxblock['target'] == 'p2pool' else pack.IntType(256).unpack(auxblock['target'].decode('hex')),
            merged_proxy=merged_proxy,
        )}))
        yield deferral.sleep(1)
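# A toy getauxblock-style reply (values made up) showing the dict shape the
# loops above merge into merged_work, keyed by aux chain id; the real code
# decodes 'target' with pack.IntType(256).unpack rather than the plain
# int(..., 16) used in this simplified sketch.
sample_auxblock = {'hash': '11' * 32, 'chainid': 1, 'target': 'ff' * 32}
merged_entry = {sample_auxblock['chainid']: dict(
    hash=int(sample_auxblock['hash'], 16),
    target=int(sample_auxblock['target'], 16), # simplified decode
)}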
@defer.inlineCallbacks
def work1_thread():
    while True:
        flag = work_updated.get_deferred()
        try:
            yield set_real_work1()
        except:
            log.err()
        yield defer.DeferredList([flag, deferral.sleep(random.uniform(1, 10))], fireOnOneCallback=True)
@defer.inlineCallbacks
def set_merged_work():
    if not args.merged_url:
        return
    merged = jsonrpc.Proxy(args.merged_url, (args.merged_userpass,))
    while True:
        auxblock = yield deferral.retry('Error while calling merged getauxblock:', 1)(merged.rpc_getauxblock)()
        pre_merged_work.set(dict(
            hash=int(auxblock['hash'], 16),
            target=bitcoin_data.HashType().unpack(auxblock['target'].decode('hex')),
            chain_id=auxblock['chainid'],
        ))
        yield deferral.sleep(1)
@defer.inlineCallbacks
def upnp_thread():
    while True:
        try:
            is_lan, lan_ip = yield ipdiscover.get_local_ip()
            if is_lan: # guard instead of the original `continue`, which skipped the sleep below and retried ipdiscover without backoff
                pm = yield portmapper.get_port_mapper()
                yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
        except:
            if p2pool_init.DEBUG:
                log.err()
        yield deferral.sleep(random.expovariate(1/120))
@defer.inlineCallbacks
def upnp_thread():
    while True:
        try:
            is_lan, lan_ip = yield ipdiscover.get_local_ip()
            if is_lan:
                pm = yield portmapper.get_port_mapper()
                yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
        except defer.TimeoutError:
            pass
        except:
            if p2pool.DEBUG:
                log.err(None, 'UPnP error:')
        yield deferral.sleep(random.expovariate(1/120))
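# random.expovariate(1/120) above draws retry delays with mean 120 s, jittering
# the polling interval so nodes don't retry in lockstep. A quick sanity check
# of that mean (float literal avoids Python 2 integer division):
import random
samples = [random.expovariate(1/120.) for _ in xrange(10000)]
print sum(samples)/len(samples) # ~120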
@defer.inlineCallbacks
def status_thread():
    last_str = None
    last_time = 0
    while True:
        yield deferral.sleep(3)
        try:
            if time.time() > current_work2.value['last_update'] + 60:
                print '''---> LOST CONTACT WITH BITCOIND for 60 seconds, check that it isn't frozen or dead <---'''
            
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                if height > 2:
                    att_s = p2pool_data.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], min(height - 1, 720))
                    weights, total_weight, donation_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 65535*2**256)
                    shares, stale_doa_shares, stale_not_doa_shares = get_share_counts(True)
                    stale_shares = stale_doa_shares + stale_not_doa_shares
                    fracs = [share.stale_frac for share in tracker.get_chain(current_work.value['best_share_hash'], min(120, height)) if share.stale_frac is not None]
                    this_str = 'Pool: %sH/s in %i shares (%i/%i verified) Recent: %.02f%% >%sH/s Shares: %i (%i orphan, %i dead) Peers: %i' % (
                        math.format(int(att_s / (1. - (math.median(fracs) if fracs else 0)))),
                        height,
                        len(tracker.verified.shares),
                        len(tracker.shares),
                        weights.get(my_script, 0)/total_weight*100,
                        math.format(int(weights.get(my_script, 0)*att_s//total_weight / (1. - (math.median(fracs) if fracs else 0)))),
                        shares,
                        stale_not_doa_shares,
                        stale_doa_shares,
                        len(p2p_node.peers),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')
                    if fracs:
                        med = math.median(fracs)
                        this_str += '\nPool stales: %i%%' % (int(100*med+.5),)
                        conf = 0.95
                        if shares:
                            this_str += u' Own: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius(math.binomial_conf_interval(stale_shares, shares, conf)))
                            if med < .99:
                                this_str += u' Own efficiency: %i±%i%%' % tuple(int(100*x+.5) for x in math.interval_to_center_radius((1 - y)/(1 - med) for y in math.binomial_conf_interval(stale_shares, shares, conf)[::-1]))
                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
        except:
            log.err()
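# Worked example (made-up rates) of the "Own efficiency" figure above: the
# miner's non-stale fraction relative to the pool median, so a miner with
# fewer stales than the pool shows as over 100%.
own_stale, pool_median_stale = 0.10, 0.15
print '%.0f%%' % (100 * (1 - own_stale) / (1 - pool_median_stale),) # ~106%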
@defer.inlineCallbacks
def set_merged_work(merged_url, merged_userpass):
    merged_proxy = jsonrpc.Proxy(merged_url, dict(Authorization="Basic " + base64.b64encode(merged_userpass)))
    while self.running:
        auxblock = yield deferral.retry("Error while calling merged getauxblock:", 30)(merged_proxy.rpc_getauxblock)()
        # merge via math.merge_dicts as in the other variants; the original
        # dict(self.merged_work.value, **{...}) breaks on the integer chainid
        # key, since **-unpacked keyword keys must be strings
        self.merged_work.set(math.merge_dicts(self.merged_work.value, {auxblock["chainid"]: dict(
            hash=int(auxblock["hash"], 16),
            target="p2pool" if auxblock["target"] == "p2pool" else pack.IntType(256).unpack(auxblock["target"].decode("hex")),
            merged_proxy=merged_proxy,
        )}))
        yield deferral.sleep(1)
@defer.inlineCallbacks
def main(args):
    try:
        if args.charts:
            from . import draw
        
        print 'p2pool (version %s)' % (p2pool_init.__version__,)
        print
        
        # connect to bitcoind over JSON-RPC and do initial getwork
        url = 'http://%s:%i/' % (args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.Proxy(url, (args.bitcoind_rpc_username, args.bitcoind_rpc_password))
        temp_work, temp_height = yield getwork(bitcoind)
        print '    ...success!'
        print '    Current block hash: %x height: %i' % (temp_work.previous_block, temp_height)
        print
        
        # connect to bitcoind over bitcoin-p2p and do checkorder to get pubkey to send payouts to
        print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
        factory = bitcoin.p2p.ClientFactory(args.net)
        reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
        my_script = yield get_payout_script(factory)
        if args.pubkey_hash is None:
            if my_script is None:
                print '    IP transaction denied ... falling back to sending to address.'
                my_script = yield get_payout_script2(bitcoind, args.net)
        else:
            my_script = bitcoin.data.pubkey_hash_to_script2(args.pubkey_hash)
        print '    ...success!'
        print '    Payout script:', my_script.encode('hex')
        print
        
        ht = bitcoin.p2p.HeightTracker(factory)
        
        tracker = p2pool.OkayTracker(args.net)
        chains = expiring_dict.ExpiringDict(300)
        def get_chain(chain_id_data):
            return chains.setdefault(chain_id_data, Chain(chain_id_data))
        
        peer_heads = expiring_dict.ExpiringDict(300) # hash -> peers that know of it
        
        # information affecting work that should trigger a long-polling update
        current_work = variable.Variable(None)
        # information affecting work that should not trigger a long-polling update
        current_work2 = variable.Variable(None)
        
        work_updated = variable.Event()
        
        requested = expiring_dict.ExpiringDict(300)
        
        @defer.inlineCallbacks
        def set_real_work1():
            work, height = yield getwork(bitcoind)
            changed = work.previous_block != current_work.value['previous_block'] if current_work.value is not None else True
            current_work.set(dict(
                version=work.version,
                previous_block=work.previous_block,
                target=work.target,
                height=height,
                best_share_hash=current_work.value['best_share_hash'] if current_work.value is not None else None,
            ))
            current_work2.set(dict(
                clock_offset=time.time() - work.timestamp,
            ))
            if changed:
                set_real_work2()
        
        def set_real_work2():
            best, desired = tracker.think(ht, current_work.value['previous_block'], time.time() - current_work2.value['clock_offset'])
            
            t = dict(current_work.value)
            t['best_share_hash'] = best
            current_work.set(t)
            
            t = time.time()
            for peer2, share_hash in desired:
                if share_hash not in tracker.tails: # was received in the time tracker.think was running
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                potential_peers = set()
                for head in tracker.tails[share_hash]:
                    potential_peers.update(peer_heads.get(head, set()))
                potential_peers = [peer for peer in potential_peers if peer.connected2]
                if count == 0 and peer2 is not None and peer2.connected2:
                    peer = peer2
                else:
                    peer = random.choice(potential_peers) if potential_peers and random.random() > .2 else peer2
                if peer is None:
                    continue
                
                print 'Requesting parent share %s from %s' % (p2pool.format_hash(share_hash), '%s:%i' % peer.addr)
                peer.send_getshares(
                    hashes=[share_hash],
                    parents=2000,
                    stops=list(set(tracker.heads) | set(
                        tracker.get_nth_parent_hash(head, min(max(0, tracker.get_height_and_last(head)[0] - 1), 10)) for head in tracker.heads
                    ))[:100],
                )
                requested[share_hash] = t, count + 1
        
        print 'Initializing work...'
        yield set_real_work1()
        set_real_work2()
        print '    ...success!'
        
        start_time = time.time() - current_work2.value['clock_offset']
        
        # setup p2p logic and join p2pool network
        
        def share_share(share, ignore_peer=None):
            for peer in p2p_node.peers.itervalues():
                if peer is ignore_peer:
                    continue
                #if p2pool_init.DEBUG:
                #    print "Sending share %s to %r" % (p2pool.format_hash(share.hash), peer.addr)
                peer.send_shares([share])
            share.flag_shared()
        
        def p2p_shares(shares, peer=None):
            if len(shares) > 5:
                print 'Processing %i shares...' % (len(shares),)
            
            some_new = False
            for share in shares:
                if share.hash in tracker.shares:
                    #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool.format_hash(share.hash),)
                    continue
                some_new = True
                
                #print 'Received share %s from %r' % (p2pool.format_hash(share.hash), share.peer.addr if share.peer is not None else None)
                
                tracker.add(share)
                #for peer2, share_hash in desired:
                #    print 'Requesting parent share %x' % (share_hash,)
                #    peer2.send_getshares(hashes=[share_hash], parents=2000)
                
                if share.bitcoin_hash <= share.header['target']:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! %s bitcoin: %x' % (p2pool.format_hash(share.hash), share.bitcoin_hash,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=share.as_block(tracker, args.net))
                    else:
                        print 'No bitcoind connection! Erp!'
            
            if shares and peer is not None:
                peer_heads.setdefault(shares[0].hash, set()).add(peer)
            
            if some_new:
                set_real_work2()
            
            if len(shares) > 5:
                print '... done processing %i shares. Have: %i/~%i' % (len(shares), len(tracker.shares), 2*args.net.CHAIN_LENGTH)
        
        def p2p_share_hashes(share_hashes, peer):
            t = time.time()
            get_hashes = []
            for share_hash in share_hashes:
                if share_hash in tracker.shares:
                    continue
                last_request_time, count = requested.get(share_hash, (None, 0))
                if last_request_time is not None and last_request_time - 5 < t < last_request_time + 10 * 1.5**count:
                    continue
                print 'Got share hash, requesting! Hash: %s' % (p2pool.format_hash(share_hash),)
                get_hashes.append(share_hash)
                requested[share_hash] = t, count + 1
            
            if share_hashes and peer is not None:
                peer_heads.setdefault(share_hashes[0], set()).add(peer)
            if get_hashes:
                peer.send_getshares(hashes=get_hashes, parents=0, stops=[])
        
        def p2p_get_shares(share_hashes, parents, stops, peer):
            parents = min(parents, 1000//len(share_hashes))
            stops = set(stops)
            shares = []
            for share_hash in share_hashes:
                for share in itertools.islice(tracker.get_chain_known(share_hash), parents + 1):
                    if share.hash in stops:
                        break
                    shares.append(share)
            peer.send_shares(shares, full=True)
        
        print 'Joining p2pool network using TCP port %i...' % (args.p2pool_port,)
        
        def parse(x):
            if ':' in x:
                ip, port = x.split(':')
                return ip, int(port)
            else:
                return x, args.net.P2P_PORT
        
        nodes = set([
            ('72.14.191.28', args.net.P2P_PORT),
            ('62.204.197.159', args.net.P2P_PORT),
            ('142.58.248.28', args.net.P2P_PORT),
            ('94.23.34.145', args.net.P2P_PORT),
        ])
        for host in [
            'p2pool.forre.st',
            'dabuttonfactory.com',
        ]:
            try:
                nodes.add(((yield reactor.resolve(host)), args.net.P2P_PORT))
            except:
                log.err(None, 'Error resolving bootstrap node IP:')
        
        p2p_node = p2p.Node(
            current_work=current_work,
            port=args.p2pool_port,
            net=args.net,
            addr_store=db.SQLiteDict(sqlite3.connect(os.path.join(os.path.dirname(sys.argv[0]), 'addrs.dat'), isolation_level=None), args.net.ADDRS_TABLE),
            mode=0 if args.low_bandwidth else 1,
            preferred_addrs=set(map(parse, args.p2pool_nodes)) | nodes,
        )
        p2p_node.handle_shares = p2p_shares
        p2p_node.handle_share_hashes = p2p_share_hashes
        p2p_node.handle_get_shares = p2p_get_shares
        
        p2p_node.start()
        
        # send share when the chain changes to their chain
        def work_changed(new_work):
            #print 'Work changed:', new_work
            for share in tracker.get_chain_known(new_work['best_share_hash']):
                if share.shared:
                    break
                share_share(share, share.peer)
        current_work.changed.watch(work_changed)
        
        print '    ...success!'
        print
        
        @defer.inlineCallbacks
        def upnp_thread():
            while True:
                try:
                    is_lan, lan_ip = yield ipdiscover.get_local_ip()
                    if not is_lan:
                        continue
                    pm = yield portmapper.get_port_mapper()
                    yield pm._upnp.add_port_mapping(lan_ip, args.net.P2P_PORT, args.net.P2P_PORT, 'p2pool', 'TCP')
                except:
                    if p2pool_init.DEBUG:
                        log.err()
                yield deferral.sleep(random.expovariate(1/120))
        
        if args.upnp:
            upnp_thread()
        
        # start listening for workers with a JSON-RPC server
        
        print 'Listening for workers on port %i...' % (args.worker_port,)
        
        # setup worker logic
        
        merkle_root_to_transactions = expiring_dict.ExpiringDict(300)
        run_identifier = struct.pack('<Q', random.randrange(2**64))
        
        def compute(state, payout_script):
            if payout_script is None:
                payout_script = my_script
            if state['best_share_hash'] is None and args.net.PERSIST:
                raise jsonrpc.Error(-12345, u'p2pool is downloading shares')
            pre_extra_txs = [tx for tx in tx_pool.itervalues() if tx.is_good()]
            pre_extra_txs = pre_extra_txs[:2**16 - 1] # merkle_branch limit
            extra_txs = []
            size = 0
            for tx in pre_extra_txs:
                this_size = len(bitcoin.data.tx_type.pack(tx.tx))
                if size + this_size > 500000:
                    break
                extra_txs.append(tx)
                size += this_size
            # XXX check sigops!
            # XXX assuming generate_tx is smallish here..
            generate_tx = p2pool.generate_transaction(
                tracker=tracker,
                previous_share_hash=state['best_share_hash'],
                new_script=payout_script,
                subsidy=(50*100000000 >> (state['height'] + 1)//210000) + sum(tx.value_in - tx.value_out for tx in extra_txs),
                nonce=run_identifier + struct.pack('<Q', random.randrange(2**64)),
                block_target=state['target'],
                net=args.net,
            )
            print 'Generating! Difficulty: %.06f Payout if block: %.6f BTC' % (0xffff*2**208/p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'], generate_tx['tx_outs'][-1]['value']*1e-8)
            #print 'Target: %x' % (p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target'],)
            #, have', shares.count(my_script) - 2, 'share(s) in the current chain. Fee:', sum(tx.value_in - tx.value_out for tx in extra_txs)/100000000
            transactions = [generate_tx] + [tx.tx for tx in extra_txs]
            merkle_root = bitcoin.data.merkle_hash(transactions)
            merkle_root_to_transactions[merkle_root] = transactions # will stay for 1000 seconds
            
            timestamp = int(time.time() - current_work2.value['clock_offset'])
            if state['best_share_hash'] is not None:
                timestamp2 = math.median((s.timestamp for s in itertools.islice(tracker.get_chain_to_root(state['best_share_hash']), 11)), use_float=False) + 1
                if timestamp2 > timestamp:
                    print 'Toff', timestamp2 - timestamp
                    timestamp = timestamp2
            target2 = p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            times[p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['nonce']] = time.time()
            #print 'SENT', 2**256//p2pool.coinbase_type.unpack(generate_tx['tx_ins'][0]['script'])['share_data']['target']
            return bitcoin.getwork.BlockAttempt(state['version'], state['previous_block'], merkle_root, timestamp, state['target'], target2)
        
        my_shares = set()
        times = {}
        
        def got_response(data):
            try:
                # match up with transactions
                header = bitcoin.getwork.decode_data(data)
                transactions = merkle_root_to_transactions.get(header['merkle_root'], None)
                if transactions is None:
                    print '''Couldn't link returned work's merkle root with its transactions - should only happen if you recently restarted p2pool'''
                    return False
                block = dict(header=header, txs=transactions)
                hash_ = bitcoin.data.block_header_type.hash256(block['header'])
                if hash_ <= block['header']['target'] or p2pool_init.DEBUG:
                    print
                    print 'GOT BLOCK! Passing to bitcoind! bitcoin: %x' % (hash_,)
                    print
                    if factory.conn.value is not None:
                        factory.conn.value.send_block(block=block)
                    else:
                        print 'No bitcoind connection! Erp!'
                target = p2pool.coinbase_type.unpack(transactions[0]['tx_ins'][0]['script'])['share_data']['target']
                if hash_ > target:
                    print 'Worker submitted share with hash (%x) > target (%x)' % (hash_, target)
                    return False
                share = p2pool.Share.from_block(block)
                my_shares.add(share.hash)
                print 'GOT SHARE! %s prev %s age %.2fs' % (p2pool.format_hash(share.hash), p2pool.format_hash(share.previous_hash), time.time() - times[share.nonce]) + (' DEAD ON ARRIVAL' if share.previous_hash != current_work.value['best_share_hash'] else '')
                good = share.previous_hash == current_work.value['best_share_hash']
                # maybe revert back to tracker being non-blocking so 'good' can be more accurate?
                p2p_shares([share])
                # eg. good = share.hash == current_work.value['best_share_hash'] here
                return good
            except:
                log.err(None, 'Error processing data received from worker:')
                return False
        
        web_root = worker_interface.WorkerInterface(current_work, compute, got_response, args.net)
        
        def get_rate():
            if current_work.value['best_share_hash'] is not None:
                height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net, min(height, 720))
                return json.dumps(att_s)
            return json.dumps(None)
        
        def get_users():
            height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
            weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 720), 2**256)
            res = {}
            for script in sorted(weights, key=lambda s: weights[s]):
                res[script.encode('hex')] = weights[script]/total_weight
            return json.dumps(res)
        
        class WebInterface(resource.Resource):
            def __init__(self, func, mime_type):
                self.func, self.mime_type = func, mime_type
            
            def render_GET(self, request):
                request.setHeader('Content-Type', self.mime_type)
                return self.func()
        
        web_root.putChild('rate', WebInterface(get_rate, 'application/json'))
        web_root.putChild('users', WebInterface(get_users, 'application/json'))
        if args.charts:
            web_root.putChild('chain_img', WebInterface(lambda: draw.get(tracker, current_work.value['best_share_hash']), 'image/png'))
        
        reactor.listenTCP(args.worker_port, server.Site(web_root))
        
        print '    ...success!'
        print
        
        # done!
        
        tx_pool = expiring_dict.ExpiringDict(600, get_touches=False) # hash -> tx
        get_raw_transaction = deferral.DeferredCacher(lambda tx_hash: bitcoind.rpc_getrawtransaction('%x' % tx_hash), expiring_dict.ExpiringDict(100))
        
        class Tx(object):
            def __init__(self, tx, seen_at_block):
                self.hash = bitcoin.data.tx_type.hash256(tx)
                self.tx = tx
                self.seen_at_block = seen_at_block
                self.mentions = set([bitcoin.data.tx_type.hash256(tx)] + [tx_in['previous_output']['hash'] for tx_in in tx['tx_ins']])
                #print
                #print '%x %r' % (seen_at_block, tx)
                #for mention in self.mentions:
                #    print '%x' % mention
                #print
                self.parents_all_in_blocks = False
                self.value_in = 0
                #print self.tx
                self.value_out = sum(txout['value'] for txout in self.tx['tx_outs'])
                self._find_parents_in_blocks()
            
            @defer.inlineCallbacks
            def _find_parents_in_blocks(self):
                for tx_in in self.tx['tx_ins']:
                    try:
                        raw_transaction = yield get_raw_transaction(tx_in['previous_output']['hash'])
                    except Exception:
                        return
                    self.value_in += raw_transaction['tx']['txouts'][tx_in['previous_output']['index']]['value']
                    #print raw_transaction
                    if not raw_transaction['parent_blocks']:
                        return
                self.parents_all_in_blocks = True
            
            def is_good(self):
                if not self.parents_all_in_blocks:
                    return False
                x = self.is_good2()
                #print 'is_good:', x
                return x
        
        @defer.inlineCallbacks
        def new_tx(tx_hash):
            try:
                assert isinstance(tx_hash, (int, long))
                #print 'REQUESTING', tx_hash
                tx = yield (yield factory.getProtocol()).get_tx(tx_hash)
                #print 'GOT', tx
                tx_pool[bitcoin.data.tx_type.hash256(tx)] = Tx(tx, current_work.value['previous_block'])
            except:
                log.err(None, 'Error handling tx:')
        # disable for now, for testing impact on stales
        #factory.new_tx.watch(new_tx)
        
        def new_block(block_hash):
            work_updated.happened()
        factory.new_block.watch(new_block)
        
        print 'Started successfully!'
        print
        
        ht.updated.watch(lambda x: set_real_work2())
        
        @defer.inlineCallbacks
        def work1_thread():
            while True:
                flag = work_updated.get_deferred()
                try:
                    yield set_real_work1()
                except:
                    log.err()
                yield defer.DeferredList([flag, deferral.sleep(random.expovariate(1/20))], fireOnOneCallback=True)
        
        @defer.inlineCallbacks
        def work2_thread():
            while True:
                try:
                    set_real_work2()
                except:
                    log.err()
                yield deferral.sleep(random.expovariate(1/20))
        
        work1_thread()
        work2_thread()
        
        counter = skiplists.CountsSkipList(tracker, run_identifier)
        
        while True:
            yield deferral.sleep(3)
            try:
                if current_work.value['best_share_hash'] is not None:
                    height, last = tracker.get_height_and_last(current_work.value['best_share_hash'])
                    if height > 5:
                        att_s = p2pool.get_pool_attempts_per_second(tracker, current_work.value['best_share_hash'], args.net)
                        weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 120), 2**100)
                        matching_in_chain = counter(current_work.value['best_share_hash'], height)
                        shares_in_chain = my_shares & matching_in_chain
                        stale_shares = my_shares - matching_in_chain
                        print 'Pool: %sH/s in %i shares Recent: %.02f%% >%sH/s Shares: %i (%i stale) Peers: %i' % (
                            math.format(att_s),
                            height,
                            weights.get(my_script, 0)/total_weight*100,
                            math.format(weights.get(my_script, 0)/total_weight*att_s),
                            len(shares_in_chain) + len(stale_shares),
                            len(stale_shares),
                            len(p2p_node.peers),
                        )
                        #weights, total_weight = tracker.get_cumulative_weights(current_work.value['best_share_hash'], min(height, 100), 2**100)
                        #for k, v in weights.iteritems():
                        #    print k.encode('hex'), v/total_weight
            except:
                log.err()
    except:
        log.err(None, 'Fatal error:')
        reactor.stop()
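# Worked example of the subsidy expression in compute() above: 50 BTC in
# satoshis, halved every 210000 blocks via a right shift (the block being
# generated sits at state['height'] + 1).
def new_block_subsidy(current_height):
    return 50*100000000 >> (current_height + 1)//210000

print new_block_subsidy(0)      # 5000000000 (50 BTC)
print new_block_subsidy(209998) # 5000000000 (last 50 BTC block)
print new_block_subsidy(209999) # 2500000000 (first halving)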