def get_user_details(self, username):
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1

    user, contents2 = contents[0], contents[1:]

    desired_pseudoshare_target = None
    desired_share_target = None
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == '+':
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()
        elif symbol == '/':
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
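# Illustrative only: how the username suffixes parsed above are typically used by miners.
# The address and numbers below are made-up examples, and 'wb' stands for whatever
# WorkerBridge-like object exposes get_user_details(); neither is taken from the source.
#
#   '1ExampleAddr'        -> payouts to 1ExampleAddr, automatic targets
#   '1ExampleAddr+8'      -> pseudoshare (miner) difficulty fixed at 8
#   '1ExampleAddr/512'    -> minimum share difficulty of 512
#   '1ExampleAddr+8/512'  -> both of the above
#
# user, pubkey_hash, share_target, pseudo_target = wb.get_user_details('1ExampleAddr+8/512')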
def get_payout_script_from_username(user):
    if user is None:
        return None
    try:
        return bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, net))
    except: # XXX blah
        return None
def get_user_details(self, request):
    user = request.getUser() if request.getUser() is not None else ""

    desired_pseudoshare_target = None
    if "+" in user:
        user, desired_pseudoshare_difficulty_str = user.rsplit("+", 1)
        try:
            desired_pseudoshare_target = bitcoin_data.difficulty_to_target(
                float(desired_pseudoshare_difficulty_str)
            )
        except:
            pass

    desired_share_target = 2 ** 256 - 1
    if "/" in user:
        user, min_diff_str = user.rsplit("/", 1)
        try:
            desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
        except:
            pass

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
def get_user_details(self, user):
    desired_pseudoshare_target = None
    if '+' in user:
        user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
        try:
            desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
        except:
            pass

    desired_share_target = 2**256 - 1
    if '/' in user:
        user, min_diff_str = user.rsplit('/', 1)
        try:
            desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
        except:
            pass

    current_txouts = self.node.get_current_txouts()
    current_pay_out = current_txouts.get(bitcoin_data.pubkey_to_script2(self.my_pubkey), 0)*1e-6
    if current_pay_out == 0 or random.uniform(0, 100) < self.worker_fee:
        script = bitcoin_data.pubkey_to_script2(self.my_pubkey)
    else:
        try:
            script = bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT))
        except: # XXX blah
            script = bitcoin_data.pubkey_to_script2(self.my_pubkey)

    return user, script, desired_share_target, desired_pseudoshare_target
def get_user_details(self, user):
    desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float("0.00058207"))
    if '+' in user:
        user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
        try:
            desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
        except:
            pass

    desired_share_target = 2**256 - 1
    if '/' in user:
        user, min_diff_str = user.rsplit('/', 1)
        try:
            desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
        except:
            pass

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
def get_user_details(self, username):
    # capture the '+'/'/' separators so they pair up with their parameters below
    contents = re.split("([+/])", username)
    assert len(contents) % 2 == 1

    user, contents2 = contents[0], contents[1:]

    desired_pseudoshare_target = None
    desired_share_target = 2 ** 256 - 1
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == "+":
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                pass
        elif symbol == "/":
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                pass

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
@defer.inlineCallbacks
def get_payout_script2(bitcoind, net):
    address = yield bitcoind.rpc_getaccountaddress('p2pool')
    validate_response = yield bitcoind.rpc_validateaddress(address)
    if 'pubkey' not in validate_response:
        print ' Pubkey request failed. Falling back to payout to address.'
        defer.returnValue(bitcoin_data.pubkey_hash_to_script2(bitcoin_data.address_to_pubkey_hash(address, net)))
    pubkey = validate_response['pubkey'].decode('hex')
    defer.returnValue(bitcoin_data.pubkey_to_script2(pubkey))
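# A minimal usage sketch, assuming the Twisted reactor is running and 'bitcoind' is the
# jsonrpc proxy built in main(); the caller name below is hypothetical and only shows that
# get_payout_script2 must be yielded from another inlineCallbacks coroutine.
@defer.inlineCallbacks
def _example_resolve_payout_script(bitcoind, net):
    script = yield get_payout_script2(bitcoind, net)
    print 'Payout script: %s' % (script.encode('hex'),)
    defer.returnValue(script)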
@defer.inlineCallbacks
def freshen_addresses(self, c):
    self.cur_address_throttle = time.time()
    if self.cur_address_throttle - self.address_throttle < 30:
        return
    self.address_throttle = time.time()
    print "ATTEMPTING TO FRESHEN ADDRESS."
    self.address = yield deferral.retry('Error getting a dynamic address from bitcoind:', 5)(lambda: self.bitcoind.rpc_getnewaddress('p2pool'))()
    new_pubkey = bitcoin_data.address_to_pubkey_hash(self.address, self.net)
    self.pubkeys.popleft()
    self.pubkeys.addkey(new_pubkey)
    print " Updated payout pool:"
    for i in range(len(self.pubkeys.keys)):
        print ' ...payout %d: %s(%f)' % (i, bitcoin_data.pubkey_hash_to_address(self.pubkeys.keys[i], self.net), self.pubkeys.keyweights[i],)
    self.pubkeys.updatestamp(c)
    print " Next address rotation in : %fs" % (time.time()-c+self.args.timeaddresses)
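# The self.pubkeys object used above (and by the dynamic-address code below) is not shown
# in this file. A minimal keypool sketch consistent with the calls made here -- addkey,
# popleft, weighted, updatestamp, plus .keys/.keyweights/.stamp -- might look like the
# following; it is an assumption for illustration (raw pubkey hashes, equal weights) and
# relies on this module's existing time/random imports.
class keypool(object):
    def __init__(self):
        self.keys = []            # payout pubkey hashes, oldest first
        self.keyweights = []      # relative weight used when picking a payout key
        self.stamp = time.time()  # time of the last rotation
    def addkey(self, new_pubkey):
        self.keys.append(new_pubkey)
        self.keyweights.append(1.0)
    def popleft(self):
        self.keys.pop(0)
        self.keyweights.pop(0)
    def weighted(self):
        # pick an index with probability proportional to its weight
        target = random.uniform(0, sum(self.keyweights))
        running = 0.0
        for i, w in enumerate(self.keyweights):
            running += w
            if running >= target:
                return i
        return len(self.keys) - 1
    def updatestamp(self, c):
        self.stamp = c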
def get_user_details(self, username):
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1

    user, contents2 = contents[0], contents[1:]

    desired_pseudoshare_target = None
    desired_share_target = None
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == '+':
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()
        elif symbol == '/':
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()

    if self.args.address == 'dynamic':
        i = self.pubkeys.weighted()
        pubkey_hash = self.pubkeys.keys[i]

        c = time.time()
        if (c - self.pubkeys.stamp) > self.args.timeaddresses:
            self.freshen_addresses(c)

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            if self.args.address != 'dynamic':
                pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
@defer.inlineCallbacks
def main(args, net, datadir_path, merged_urls, worker_endpoint):
    try:
        print 'p2pool (version %s)' % (p2pool.__version__,)
        print

        @defer.inlineCallbacks
        def connect_p2p():
            # connect to bitcoind over bitcoin-p2p
            print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port)
            factory = bitcoin_p2p.ClientFactory(net.PARENT)
            reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory)
            def long():
                print ''' ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...'''
            long_dc = reactor.callLater(5, long)
            yield factory.getProtocol() # waits until handshake is successful
            if not long_dc.called: long_dc.cancel()
            print ' ...success!'
            print
            defer.returnValue(factory)

        if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections
            factory = yield connect_p2p()

        # connect to bitcoind over JSON-RPC and do initial getmemorypool
        url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port)
        print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username)
        bitcoind = jsonrpc.HTTPProxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30)
        yield helper.check(bitcoind, net)
        temp_work = yield helper.getwork(bitcoind)

        bitcoind_getinfo_var = variable.Variable(None)
        @defer.inlineCallbacks
        def poll_warnings():
            bitcoind_getinfo_var.set((yield deferral.retry('Error while calling getinfo:')(bitcoind.rpc_getinfo)()))
        yield poll_warnings()
        deferral.RobustLoopingCall(poll_warnings).start(20*60)

        print ' ...success!'
        print ' Current block hash: %x' % (temp_work['previous_block'],)
        print ' Current block height: %i' % (temp_work['height'] - 1,)
        print

        if not args.testnet:
            factory = yield connect_p2p()

        print 'Determining payout address...'
        if args.pubkey_hash is None:
            address_path = os.path.join(datadir_path, 'cached_payout_address')

            if os.path.exists(address_path):
                with open(address_path, 'rb') as f:
                    address = f.read().strip('\r\n')
                print ' Loaded cached address: %s...' % (address,)
            else:
                address = None

            if address is not None:
                res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))()
                if not res['isvalid'] or not res['ismine']:
                    print ' Cached address is either invalid or not controlled by local bitcoind!'
                    address = None

            if address is None:
                print ' Getting payout address from bitcoind...'
                address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))()

                with open(address_path, 'wb') as f:
                    f.write(address)

            my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT)
        else:
            my_pubkey_hash = args.pubkey_hash

        print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT)
        print

        print "Loading shares..."
        shares = {}
        known_verified = set()
        def share_cb(share):
            share.time_seen = 0 # XXX
            shares[share.hash] = share
            if len(shares) % 1000 == 0 and shares:
                print " %i" % (len(shares),)
        ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add)
        print " ...done loading %i shares (%i verified)!" % (len(shares), len(known_verified))
        print

        print 'Initializing work...'
        node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net)
        yield node.start()

        for share_hash in shares:
            if share_hash not in node.tracker.items:
                ss.forget_share(share_hash)
        for share_hash in known_verified:
            if share_hash not in node.tracker.verified.items:
                ss.forget_verified_share(share_hash)
        node.tracker.removed.watch(lambda share: ss.forget_share(share.hash))
        node.tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash))

        def save_shares():
            for share in node.tracker.get_chain(node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2*net.CHAIN_LENGTH)):
                ss.add_share(share)
                if share.hash in node.tracker.verified.items:
                    ss.add_verified_hash(share.hash)
        deferral.RobustLoopingCall(save_shares).start(60)

        print ' ...success!'
        print

        print 'Joining p2pool network using port %i...' % (args.p2pool_port,)

        @defer.inlineCallbacks
        def parse(host):
            port = net.P2P_PORT
            if ':' in host:
                host, port_str = host.split(':')
                port = int(port_str)
            defer.returnValue(((yield reactor.resolve(host)), port))

        addrs = {}
        if os.path.exists(os.path.join(datadir_path, 'addrs')):
            try:
                with open(os.path.join(datadir_path, 'addrs'), 'rb') as f:
                    addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read())))
            except:
                print >>sys.stderr, 'error parsing addrs'
        for addr_df in map(parse, net.BOOTSTRAP_ADDRS):
            try:
                addr = yield addr_df
                if addr not in addrs:
                    addrs[addr] = (0, time.time(), time.time())
            except:
                log.err()

        connect_addrs = set()
        for addr_df in map(parse, args.p2pool_nodes):
            try:
                connect_addrs.add((yield addr_df))
            except:
                log.err()

        node.p2p_node = p2pool_node.P2PNode(node,
            port=args.p2pool_port,
            max_incoming_conns=args.p2pool_conns,
            addr_store=addrs,
            connect_addrs=connect_addrs,
            desired_outgoing_conns=args.p2pool_outgoing_conns,
            advertise_ip=args.advertise_ip,
        )
        node.p2p_node.start()

        def save_addrs():
            with open(os.path.join(datadir_path, 'addrs'), 'wb') as f:
                f.write(json.dumps(node.p2p_node.addr_store.items()))
        deferral.RobustLoopingCall(save_addrs).start(60)

        print ' ...success!'
        print

        if args.upnp:
            @defer.inlineCallbacks
            def upnp_thread():
                while True:
                    try:
                        is_lan, lan_ip = yield ipdiscover.get_local_ip()
                        if is_lan:
                            pm = yield portmapper.get_port_mapper()
                            yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP')
                    except defer.TimeoutError:
                        pass
                    except:
                        if p2pool.DEBUG:
                            log.err(None, 'UPnP error:')
                    yield deferral.sleep(random.expovariate(1/120))
            upnp_thread()

        # start listening for workers with a JSON-RPC server
        print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1])

        if args.address_share_rate is not None:
            share_rate_type = 'address'
            share_rate = args.address_share_rate
        else:
            share_rate_type = 'miner'
            share_rate = args.miner_share_rate

        wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee, args.min_difficulty, share_rate, share_rate_type)
        web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var)
        caching_wb = worker_interface.CachingWorkerBridge(wb)
        worker_interface.WorkerInterface(caching_wb).attach_to(web_root, get_handler=lambda request: request.redirect('static/'))
        web_serverfactory = server.Site(web_root)

        serverfactory = switchprotocol.FirstByteSwitchFactory({'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory)
        deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0])

        with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f:
            pass

        print ' ...success!'
        print

        # done!
        print 'Started successfully!'
        print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],)
        if args.donation_percentage > 1.1:
            print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,)
        elif args.donation_percentage < .9:
            print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,)
        else:
            print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,)
        print 'You can increase this amount with --give-author argument! (or decrease it, if you must)'
        print

        if hasattr(signal, 'SIGALRM'):
            signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread(
                sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack())
            ))
            signal.siginterrupt(signal.SIGALRM, False)
            deferral.RobustLoopingCall(signal.alarm, 30).start(1)

        if args.irc_announce:
            from twisted.words.protocols import irc
            class IRCClient(irc.IRCClient):
                nickname = 'p2pool%02i' % (random.randrange(100),)
                channel = net.ANNOUNCE_CHANNEL
                def lineReceived(self, line):
                    if p2pool.DEBUG:
                        print repr(line)
                    irc.IRCClient.lineReceived(self, line)
                def signedOn(self):
                    self.in_channel = False
                    irc.IRCClient.signedOn(self)
                    self.factory.resetDelay()
                    self.join(self.channel)
                    @defer.inlineCallbacks
                    def new_share(share):
                        if not self.in_channel:
                            return
                        if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60:
                            yield deferral.sleep(random.expovariate(1/60))
                            message = '\x02%s BLOCK FOUND by %s! %s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
                            if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages):
                                self.say(self.channel, message)
                                self._remember_message(message)
                    self.watch_id = node.tracker.verified.added.watch(new_share)
                    self.recent_messages = []
                def joined(self, channel):
                    self.in_channel = True
                def left(self, channel):
                    self.in_channel = False
                def _remember_message(self, message):
                    self.recent_messages.append(message)
                    while len(self.recent_messages) > 100:
                        self.recent_messages.pop(0)
                def privmsg(self, user, channel, message):
                    if channel == self.channel:
                        self._remember_message(message)
                def connectionLost(self, reason):
                    node.tracker.verified.added.unwatch(self.watch_id)
                    print 'IRC connection lost:', reason.getErrorMessage()
            class IRCClientFactory(protocol.ReconnectingClientFactory):
                protocol = IRCClient
            reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0))

        @defer.inlineCallbacks
        def status_thread():
            last_str = None
            last_time = 0
            while True:
                yield deferral.sleep(3)
                try:
                    height = node.tracker.get_height(node.best_share_var.value)
                    this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % (
                        height,
                        len(node.tracker.verified.items),
                        len(node.tracker.items),
                        len(node.p2p_node.peers),
                        sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming),
                    ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '')

                    datums, dt = wb.local_rate_monitor.get_datums_in_last()
                    my_att_s = sum(datum['work']/dt for datum in datums)
                    my_shares_per_s = sum(datum['work']/dt/bitcoin_data.target_to_average_attempts(datum['share_target']) for datum in datums)
                    this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % (
                        math.format(int(my_att_s)),
                        math.format_dt(dt),
                        math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95),
                        math.format_dt(1/my_shares_per_s) if my_shares_per_s else '???',
                    )

                    if height > 2:
                        (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts()
                        stale_prop = p2pool_data.get_average_stale_prop(node.tracker, node.best_share_var.value, min(60*60//net.SHARE_PERIOD, height))
                        real_att_s = p2pool_data.get_pool_attempts_per_second(node.tracker, node.best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop)

                        this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % (
                            shares, stale_orphan_shares, stale_doa_shares,
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95),
                            math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)),
                            node.get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL,
                        )
                        this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % (
                            math.format(int(real_att_s)),
                            100*stale_prop,
                            math.format_dt(2**256 / node.bitcoind_work.value['bits'].target / real_att_s),
                        )

                        for warning in p2pool_data.get_warnings(node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value):
                            print >>sys.stderr, '#'*40
                            print >>sys.stderr, '>>> Warning: ' + warning
                            print >>sys.stderr, '#'*40

                        if gc.garbage:
                            print '%i pieces of uncollectable cyclic garbage! Types: %r' % (len(gc.garbage), map(type, gc.garbage))

                    if this_str != last_str or time.time() > last_time + 15:
                        print this_str
                        last_str = this_str
                        last_time = time.time()
                except:
                    log.err()
        status_thread()
    except:
        reactor.stop()
        log.err(None, 'Fatal error:')
def get_user_details(self, username):
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1

    user, contents2 = contents[0], contents[1:]

    desired_pseudoshare_target = None
    desired_share_target = None
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == '+':
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()
        elif symbol == '/':
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()

    set_adaptive_target = (self.diff_policy == 'F') or ((self.diff_policy == 'A') and (desired_share_target is None))
    set_adaptive_pseudo = (self.diff_policy == 'F') or ((self.diff_policy == 'A') and (desired_pseudoshare_target is None))
    user_rate = None
    pool_rate = None

    if set_adaptive_target: # calculate pool hashrate
        height = self.node.tracker.get_height(self.node.best_share_var.value)
        if height > 5: # we want at least 6 shares in chain
            stale_prop = p2pool_data.get_average_stale_prop(self.node.tracker, self.node.best_share_var.value, min(60*60//self.node.net.SHARE_PERIOD, height))
            pool_rate = p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, min(height - 1, 60*60//self.node.net.SHARE_PERIOD)) / (1 - stale_prop)

    if set_adaptive_pseudo or set_adaptive_target: # calculate user's hashrate
        datums, dt = self.local_rate_monitor.get_datums_in_last()
        npoints = sum(datum['user'] == user for datum in datums)
        if npoints > 5: # at least 6 hashrate datums for the user
            user_rate = 0
            for datum in datums:
                if datum['user'] == user:
                    user_rate += datum['work']/dt

    if set_adaptive_target:
        desired_share_target = None
        if user_rate is not None and pool_rate is not None:
            if user_rate and pool_rate: # min 20 shares per block AND min 20 shares per chain
                desired_share_target = 20 * (max(self.node.bitcoind_work.value['bits'].target * pool_rate, 2**256 // (self.node.net.CHAIN_LENGTH * self.node.net.SHARE_PERIOD)) // user_rate)

    if set_adaptive_pseudo:
        desired_pseudoshare_target = None
        if user_rate is not None:
            if user_rate: # min 20 pseudoshares per 10 minutes
                desired_pseudoshare_target = 20 * (2**256 // user_rate // (10*60))

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
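# Rough worked example of the adaptive policy above (the numbers are illustrative, not from
# the source). Writing T_block for the current block target and R_pool / R_user for the
# pool's and the user's hash rates, the code aims for roughly 20 shares per block and 20
# per chain:
#
#   desired_share_target = 20 * max(T_block * R_pool, 2**256 // (CHAIN_LENGTH * SHARE_PERIOD)) // R_user
#
# so if R_user is about 1% of R_pool and the block term dominates, the user's share target
# works out to roughly 2000 * T_block, i.e. a much easier (numerically larger) target.
# The pseudoshare target 20 * (2**256 // R_user // 600) similarly aims for about 20
# pseudoshares per 10 minutes.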
def run():
    if not hasattr(tcp.Client, "abortConnection"):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print "Pausing for 3 seconds..."
        time.sleep(3)

    realnets = dict((name, net) for name, net in networks.nets.iteritems() if "_testnet" not in name)

    parser = fixargparse.FixedArgumentParser(
        description="p2pool (version %s)" % (p2pool.__version__,), fromfile_prefix_chars="@"
    )
    parser.add_argument("--version", action="version", version=p2pool.__version__)
    parser.add_argument("--net",
        help="use specified network (default: bitcoin)",
        action="store", choices=sorted(realnets), default="bitcoin", dest="net_name")
    parser.add_argument("--testnet",
        help="""use the network's testnet""",
        action="store_const", const=True, default=False, dest="testnet")
    parser.add_argument("--debug",
        help="enable debugging mode",
        action="store_const", const=True, default=False, dest="debug")
    parser.add_argument("-a", "--address",
        help="generate payouts to this address (default: <address requested from bitcoind>)",
        type=str, action="store", default=None, dest="address")
    parser.add_argument("--datadir",
        help="store data in this directory (default: <directory run_p2pool.py is in>/data)",
        type=str, action="store", default=None, dest="datadir")
    parser.add_argument("--logfile",
        help="""log to this file (default: data/<NET>/log)""",
        type=str, action="store", default=None, dest="logfile")
    parser.add_argument("--merged",
        help="call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)",
        type=str, action="append", default=[], dest="merged_urls")
    parser.add_argument("--give-author", metavar="DONATION_PERCENTAGE",
        help="donate this percentage of work towards the development of p2pool (default: 1.0)",
        type=float, action="store", default=1.0, dest="donation_percentage")
    parser.add_argument("--iocp",
        help="use Windows IOCP API in order to avoid errors due to large number of sockets being open",
        action="store_true", default=False, dest="iocp")
    parser.add_argument("--irc-announce",
        help="announce any blocks found on irc://irc.freenode.net/#p2pool",
        action="store_true", default=False, dest="irc_announce")
    parser.add_argument("--no-bugreport",
        help="disable submitting caught exceptions to the author",
        action="store_true", default=False, dest="no_bugreport")

    p2pool_group = parser.add_argument_group("p2pool interface")
    p2pool_group.add_argument("--p2pool-port", metavar="PORT",
        help="use port PORT to listen for connections (forward this port from your router!) (default: %s)"
        % ", ".join("%s:%i" % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action="store", default=None, dest="p2pool_port")
    p2pool_group.add_argument("-n", "--p2pool-node", metavar="ADDR[:PORT]",
        help="connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses",
        type=str, action="append", default=[], dest="p2pool_nodes")
    parser.add_argument("--disable-upnp",
        help="""don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer""",
        action="store_false", default=True, dest="upnp")
    p2pool_group.add_argument("--max-conns", metavar="CONNS",
        help="maximum incoming connections (default: 40)",
        type=int, action="store", default=40, dest="p2pool_conns")
    p2pool_group.add_argument("--outgoing-conns", metavar="CONNS",
        help="outgoing connections (default: 6)",
        type=int, action="store", default=6, dest="p2pool_outgoing_conns")

    worker_group = parser.add_argument_group("worker interface")
    worker_group.add_argument("-w", "--worker-port", metavar="PORT or ADDR:PORT",
        help="listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)"
        % ", ".join("%s:%i" % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action="store", default=None, dest="worker_endpoint")
    worker_group.add_argument("-f", "--fee", metavar="FEE_PERCENTAGE",
        help="""charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)""",
        type=float, action="store", default=0, dest="worker_fee")

    bitcoind_group = parser.add_argument_group("bitcoind interface")
    bitcoind_group.add_argument("--bitcoind-address", metavar="BITCOIND_ADDRESS",
        help="connect to this address (default: 127.0.0.1)",
        type=str, action="store", default="127.0.0.1", dest="bitcoind_address")
    bitcoind_group.add_argument("--bitcoind-rpc-port", metavar="BITCOIND_RPC_PORT",
        help="""connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)"""
        % ", ".join("%s:%i" % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action="store", default=None, dest="bitcoind_rpc_port")
    bitcoind_group.add_argument("--bitcoind-rpc-ssl",
        help="connect to JSON-RPC interface using SSL",
        action="store_true", default=False, dest="bitcoind_rpc_ssl")
    bitcoind_group.add_argument("--bitcoind-p2p-port", metavar="BITCOIND_P2P_PORT",
        help="""connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)"""
        % ", ".join("%s:%i" % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action="store", default=None, dest="bitcoind_p2p_port")
    bitcoind_group.add_argument(metavar="BITCOIND_RPCUSERPASS",
        help="bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)",
        type=str, action="store", default=[], nargs="*", dest="bitcoind_rpc_userpass")

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False

    net_name = args.net_name + ("_testnet" if args.testnet else "")
    net = networks.nets[net_name]

    datadir_path = os.path.join(
        (os.path.join(os.path.dirname(sys.argv[0]), "data") if args.datadir is None else args.datadir), net_name
    )
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)

    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error("a maximum of two arguments are allowed")
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]

    if args.bitcoind_rpc_password is None:
        conf_path = net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error(
                """Bitcoin configuration file not found. Manually enter your RPC password.\r\n"""
                """If you actually haven't created a configuration file, you should create one at %s with the text:\r\n"""
                """\r\n"""
                """server=1\r\n"""
                """rpcpassword=%x\r\n"""
                """\r\n"""
                """Keep that password secret! After creating the file, restart Bitcoin.""" % (conf_path, random.randrange(2 ** 128))
            )
        conf = open(conf_path, "rb").read()
        contents = {}
        for line in conf.splitlines(True):
            if "#" in line:
                line = line[: line.index("#")]
            if "=" not in line:
                continue
            k, v = line.split("=", 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ("rpcuser", "bitcoind_rpc_username", str),
            ("rpcpassword", "bitcoind_rpc_password", str),
            ("rpcport", "bitcoind_rpc_port", int),
            ("port", "bitcoind_p2p_port", int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if args.bitcoind_rpc_password is None:
            parser.error("""Bitcoin configuration file didn't contain an rpcpassword= line! Add one!""")

    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ""

    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT

    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT

    if args.p2pool_outgoing_conns > 10:
        parser.error("""--outgoing-conns can't be more than 10""")

    if args.worker_endpoint is None:
        worker_endpoint = "", net.WORKER_PORT
    elif ":" not in args.worker_endpoint:
        worker_endpoint = "", int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(":", 1)
        worker_endpoint = addr, int(port)

    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
        except Exception, e:
            parser.error("error parsing address: " + repr(e))
def run():
    if not hasattr(tcp.Client, 'abortConnection'):
        print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!"
        print 'Pausing for 3 seconds...'
        time.sleep(3)

    realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name)

    parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('--bench',
        help='enable CPU performance profiling mode',
        action='store_const', const=True, default=False, dest='bench')
    parser.add_argument('--rconsole',
        help='enable rconsole debugging mode (requires rfoo)',
        action='store_const', const=True, default=False, dest='rconsole')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>), or (dynamic)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('-i', '--numaddresses',
        help='number of bitcoin auto-generated addresses to maintain for getwork dynamic address allocation',
        type=int, action='store', default=2, dest='numaddresses')
    parser.add_argument('-t', '--timeaddresses',
        help='seconds between acquisition of new address and removal of single old (default: 2 days or 172800s)',
        type=int, action='store', default=172800, dest='timeaddresses')
    parser.add_argument('--datadir',
        help='store data in this directory (default: <directory run_p2pool.py is in>/data)',
        type=str, action='store', default=None, dest='datadir')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--web-static',
        help='use an alternative web frontend in this directory (otherwise use the built-in frontend)',
        type=str, action='store', default=None, dest='web_static')
    parser.add_argument('--merged',
        help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)',
        type=str, action='append', default=[], dest='merged_urls')
    parser.add_argument('--merged_addr',
        help='call createauxblock/submitauxblock on this url to get work for merged mining and use payout address (example: http://ncuser:[email protected]:10332/?payout=payoutaddr)',
        type=str, action='append', default=[], dest='merged_urls_addr')
    parser.add_argument('--coinbtext',
        help='append this text to the coinbase',
        type=str, action='append', default=[], dest='coinb_texts')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work towards the development of p2pool (default: 0.0)',
        type=float, action='store', default=0.0, dest='donation_percentage')
    parser.add_argument('--iocp',
        help='use Windows IOCP API in order to avoid errors due to large number of sockets being open',
        action='store_true', default=False, dest='iocp')
    parser.add_argument('--irc-announce',
        help='announce any blocks found on irc://irc.freenode.net/#p2pool',
        action='store_true', default=False, dest='irc_announce')
    parser.add_argument('--no-bugreport',
        help='disable submitting caught exceptions to the author',
        action='store_true', default=False, dest='no_bugreport')

    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')
    p2pool_group.add_argument('--max-conns', metavar='CONNS',
        help='maximum incoming connections (default: 40)',
        type=int, action='store', default=40, dest='p2pool_conns')
    p2pool_group.add_argument('--outgoing-conns', metavar='CONNS',
        help='outgoing connections (default: 6)',
        type=int, action='store', default=6, dest='p2pool_outgoing_conns')
    p2pool_group.add_argument('--external-ip', metavar='ADDR[:PORT]',
        help='specify your own public IP address instead of asking peers to discover it, useful for running dual WAN or asymmetric routing',
        type=str, action='store', default=None, dest='p2pool_external_ip')
    parser.add_argument('--disable-advertise',
        help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''',
        action='store_false', default=True, dest='advertise_ip')

    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT',
        help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())),
        type=str, action='store', default=None, dest='worker_endpoint')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')
    worker_group.add_argument('-s', '--share-rate', metavar='SECONDS_PER_SHARE',
        help='Auto-adjust mining difficulty on each connection to target this many seconds per pseudoshare (default: %3.0f)' % 3.,
        type=float, action='store', default=3., dest='share_rate')

    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH',
        help='custom configuration file path (when bitcoind -conf option used)',
        type=str, action='store', default=None, dest='bitcoind_config_path')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-rpc-ssl',
        help='connect to JSON-RPC interface using SSL',
        action='store_true', default=False, dest='bitcoind_rpc_ssl')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS',
        help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)',
        type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass')
    bitcoind_group.add_argument('--allow-obsolete-bitcoind',
        help='allow the use of coin daemons (bitcoind) that do not support all of the required softforks for this network (e.g. Bitcoin Core and segwit2x)',
        action='store_const', const=True, default=False, dest='allow_obsolete_bitcoind')

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True
        defer.setDebugging(True)
    else:
        p2pool.DEBUG = False
    p2pool.BENCH = args.bench

    net_name = args.net_name + ('_testnet' if args.testnet else '')
    net = networks.nets[net_name]

    datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)

    if len(args.bitcoind_rpc_userpass) > 2:
        parser.error('a maximum of two arguments are allowed')
    args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:]

    if args.bitcoind_rpc_password is None:
        conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC()
        if not os.path.exists(conf_path):
            parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n'''
                '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n'''
                '''\r\n'''
                '''server=1\r\n'''
                '''rpcpassword=%x\r\n'''
                '''\r\n'''
                '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128)))
        conf = open(conf_path, 'rb').read()
        contents = {}
        for line in conf.splitlines(True):
            if '#' in line:
                line = line[:line.index('#')]
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            contents[k.strip()] = v.strip()
        for conf_name, var_name, var_type in [
            ('rpcuser', 'bitcoind_rpc_username', str),
            ('rpcpassword', 'bitcoind_rpc_password', str),
            ('rpcport', 'bitcoind_rpc_port', int),
            ('port', 'bitcoind_p2p_port', int),
        ]:
            if getattr(args, var_name) is None and conf_name in contents:
                setattr(args, var_name, var_type(contents[conf_name]))
        if 'rpcssl' in contents and contents['rpcssl'] != '0':
            args.bitcoind_rpc_ssl = True
        if args.bitcoind_rpc_password is None:
            parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''')

    if args.bitcoind_rpc_username is None:
        args.bitcoind_rpc_username = ''

    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.PARENT.RPC_PORT

    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.PARENT.P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT

    if args.p2pool_outgoing_conns > 10:
        parser.error('''--outgoing-conns can't be more than 10''')

    if args.worker_endpoint is None:
        worker_endpoint = '', net.WORKER_PORT
    elif ':' not in args.worker_endpoint:
        worker_endpoint = '', int(args.worker_endpoint)
    else:
        addr, port = args.worker_endpoint.rsplit(':', 1)
        worker_endpoint = addr, int(port)

    if args.address is not None and args.address != 'dynamic':
        try:
            _ = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT)
            args.pubkey_hash = True
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
def get_user_details(self, username):
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1

    user, contents2 = contents[0], contents[1:]

    worker = ''
    if '_' in user:
        worker = user.split('_')[1]
        user = user.split('_')[0]
    elif '.' in user:
        worker = user.split('.')[1]
        user = user.split('.')[0]

    desired_pseudoshare_target = None
    desired_share_target = None
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == '+':
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()
        elif symbol == '/':
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()

    if self.args.address == 'dynamic':
        i = self.pubkeys.weighted()
        address = self.pubkeys.keys[i]['address']

        c = time.time()
        if (c - self.pubkeys.stamp) > self.args.timeaddresses:
            self.freshen_addresses(c)

    if random.uniform(0, 100) < self.worker_fee:
        address = self.address
    else:
        try:
            if self.node.best_share_var.value is not None:
                share_type = type(self.node.tracker.items[self.node.best_share_var.value])
            else:
                share_type = p2pool_data.Share
            ret = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
            if share_type.VERSION < 34 and ret[1] != self.node.net.PARENT.ADDRESS_VERSION:
                print("not supporting %s yet, share version needs to be 34, but is %s." % (user, share_type.VERSION))
                raise ValueError
            address = user
        except Exception: # XXX blah
            if self.args.address != 'dynamic':
                address = self.address

    if worker:
        user = user + '.' + worker

    return (user, address, desired_share_target, desired_pseudoshare_target)
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__, ) print @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % ( args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) def long(): print ''' ...taking a while. Common reasons for this include all of bitcoind's connection slots being used...''' long_dc = reactor.callLater(5, long) yield factory.getProtocol() # waits until handshake is successful if not long_dc.called: long_dc.cancel() print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % ( url, args.bitcoind_rpc_username) bitcoind = jsonrpc.HTTPProxy( url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) yield helper.check(bitcoind, net, args) temp_work = yield helper.getwork(bitcoind) bitcoind_getinfo_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): bitcoind_getinfo_var.set( (yield deferral.retry('Error while calling getinfo:')( bitcoind.rpc_getnetworkinfo)())) yield poll_warnings() deferral.RobustLoopingCall(poll_warnings).start(20 * 60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'], ) print ' Current block height: %i' % (temp_work['height'] - 1, ) print if not args.testnet: factory = yield connect_p2p() print 'Determining payout address...' pubkeys = keypool() if args.pubkey_hash is None and args.address != 'dynamic': address_path = os.path.join(datadir_path, 'cached_payout_address') if os.path.exists(address_path): with open(address_path, 'rb') as f: address = f.read().strip('\r\n') print ' Loaded cached address: %s...' % (address, ) else: address = None if address is not None: res = yield deferral.retry( 'Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))() if not res['isvalid'] or not res['ismine']: print ' Cached address is either invalid or not controlled by local bitcoind!' address = None if address is None: print ' Getting payout address from bitcoind...' address = yield deferral.retry( 'Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))() with open(address_path, 'wb') as f: f.write(address) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash( address, net.PARENT) print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address( my_pubkey_hash, net.PARENT) print pubkeys.addkey(my_pubkey_hash) elif args.address != 'dynamic': my_pubkey_hash = args.pubkey_hash print ' ...success! Payout address:', bitcoin_data.pubkey_hash_to_address( my_pubkey_hash, net.PARENT) print pubkeys.addkey(my_pubkey_hash) else: print ' Entering dynamic address mode.' if args.numaddresses < 2: print ' ERROR: Can not use fewer than 2 addresses in dynamic mode. Resetting to 2.' 
args.numaddresses = 2 for i in range(args.numaddresses): address = yield deferral.retry( 'Error getting a dynamic address from bitcoind:', 5)(lambda: bitcoind.rpc_getnewaddress('p2pool'))() new_pubkey = bitcoin_data.address_to_pubkey_hash( address, net.PARENT) pubkeys.addkey(new_pubkey) pubkeys.updatestamp(time.time()) my_pubkey_hash = pubkeys.keys[0] for i in range(len(pubkeys.keys)): print ' ...payout %d: %s' % ( i, bitcoin_data.pubkey_hash_to_address( pubkeys.keys[i], net.PARENT), ) print "Loading shares..." shares = {} known_verified = set() def share_cb(share): share.time_seen = 0 # XXX shares[share.hash] = share if len(shares) % 1000 == 0 and shares: print " %i" % (len(shares), ) ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net, share_cb, known_verified.add) print " ...done loading %i shares (%i verified)!" % ( len(shares), len(known_verified)) print print 'Initializing work...' global gnode gnode = node = p2pool_node.Node(factory, bitcoind, shares.values(), known_verified, net) yield node.start() for share_hash in shares: if share_hash not in node.tracker.items: ss.forget_share(share_hash) for share_hash in known_verified: if share_hash not in node.tracker.verified.items: ss.forget_verified_share(share_hash) node.tracker.removed.watch(lambda share: ss.forget_share(share.hash)) node.tracker.verified.removed.watch( lambda share: ss.forget_verified_share(share.hash)) def save_shares(): for share in node.tracker.get_chain( node.best_share_var.value, min(node.tracker.get_height(node.best_share_var.value), 2 * net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in node.tracker.verified.items: ss.add_verified_hash(share.hash) deferral.RobustLoopingCall(save_shares).start(60) if len(shares) > net.CHAIN_LENGTH: best_share = shares[node.best_share_var.value] previous_share = shares[ best_share.share_data['previous_share_hash']] counts = p2pool_data.get_desired_version_counts( node.tracker, node.tracker.get_nth_parent_hash(previous_share.hash, net.CHAIN_LENGTH * 9 // 10), net.CHAIN_LENGTH // 10) p2pool_data.update_min_protocol_version(counts, best_share) print ' ...success!' print print 'Joining p2pool network using port %i...' % (args.p2pool_port, ) @defer.inlineCallbacks def parse(host): port = net.P2P_PORT if ':' in host: host, port_str = host.split(':') port = int(port_str) defer.returnValue(((yield reactor.resolve(host)), port)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update( dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >> sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() node.p2p_node = p2pool_node.P2PNode( node, port=args.p2pool_port, max_incoming_conns=args.p2pool_conns, addr_store=addrs, connect_addrs=connect_addrs, desired_outgoing_conns=args.p2pool_outgoing_conns, advertise_ip=args.advertise_ip, external_ip=args.p2pool_external_ip, ) node.p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(node.p2p_node.addr_store.items())) deferral.RobustLoopingCall(save_addrs).start(60) print ' ...success!' 
print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping( lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1 / 120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' % (worker_endpoint[0], worker_endpoint[1]) if args.address_share_rate is not None: share_rate_type = 'address' share_rate = args.address_share_rate else: share_rate_type = 'miner' share_rate = args.miner_share_rate wb = work.WorkerBridge(node, my_pubkey_hash, args.donation_percentage, merged_urls, args.worker_fee, args.min_difficulty, share_rate, share_rate_type, args, pubkeys, bitcoind) web_root = web.get_web_root(wb, datadir_path, bitcoind_getinfo_var, static_dir=args.web_static) caching_wb = worker_interface.CachingWorkerBridge(wb) worker_interface.WorkerInterface(caching_wb).attach_to( web_root, get_handler=lambda request: request.redirect('/static/')) web_serverfactory = server.Site(web_root) serverfactory = switchprotocol.FirstByteSwitchFactory( {'{': stratum.StratumServerFactory(caching_wb)}, web_serverfactory) deferral.retry('Error binding to worker port:', traceback=False)( reactor.listenTCP)(worker_endpoint[1], serverfactory, interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % ( worker_endpoint[1], ) if args.donation_percentage > 1.1: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % ( args.donation_percentage, ) elif args.donation_percentage < .9: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % ( args.donation_percentage, ) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % ( args.donation_percentage, ) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal( signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''. join(traceback.format_stack()))) signal.siginterrupt(signal.SIGALRM, False) deferral.RobustLoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100), ) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): self.in_channel = False irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if not self.in_channel: return if share.pow_hash <= share.header[ 'bits'].target and abs(share.timestamp - time.time()) < 10 * 60: yield deferral.sleep(random.expovariate(1 / 60)) message = '\x02%s BLOCK FOUND by %s! 
%s%064x' % ( net.NAME.upper(), bitcoin_data.script2_to_address( share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash, ) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = node.tracker.verified.added.watch( new_share) self.recent_messages = [] def joined(self, channel): self.in_channel = True def left(self, channel): self.in_channel = False def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): node.tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory(), bindAddress=(worker_endpoint[0], 0)) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(30) try: height = node.tracker.get_height(node.best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(node.tracker.verified.items), len(node.tracker.items), len(node.p2p_node.peers), sum(1 for peer in node.p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work'] / dt for datum in datums) my_shares_per_s = sum( datum['work'] / dt / bitcoin_data.target_to_average_attempts( datum['share_target']) for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf( sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(1 / my_shares_per_s) if my_shares_per_s else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop( node.tracker, node.best_share_var.value, min(60 * 60 // net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second( node.tracker, node.best_share_var.value, min(height - 1, 60 * 60 // net.SHARE_PERIOD)) / (1 - stale_prop) paystr = '' paytot = 0.0 for i in range(len(pubkeys.keys)): curtot = node.get_current_txouts().get( bitcoin_data.pubkey_hash_to_script2( pubkeys.keys[i]), 0) paytot += curtot * 1e-8 paystr += "(%.4f)" % (curtot * 1e-8, ) paystr += "=%.4f" % (paytot, ) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %s %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf( stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf( stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x) / (1 - stale_prop)), paystr, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100 * stale_prop, math.format_dt( 2**256 / node.bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings( node.tracker, node.best_share_var.value, net, bitcoind_getinfo_var.value, node.bitcoind_work.value): print >> sys.stderr, '#' * 40 print 
>> sys.stderr, '>>> Warning: ' + warning print >> sys.stderr, '#' * 40 if gc.garbage: print '%i pieces of uncollectable cyclic garbage! Types: %r' % ( len(gc.garbage), map(type, gc.garbage)) if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
def run(): if not hasattr(tcp.Client, 'abortConnection'): print "Twisted doesn't have abortConnection! Upgrade to a newer version of Twisted to avoid memory leaks!" print 'Pausing for 3 seconds...' time.sleep(3) realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name) parser = fixargparse.FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@') parser.add_argument('--version', action='version', version=p2pool.__version__) parser.add_argument('--net', help='use specified network (default: bitcoin)', action='store', choices=sorted(realnets), default='bitcoin', dest='net_name') parser.add_argument('--testnet', help='''use the network's testnet''', action='store_const', const=True, default=False, dest='testnet') parser.add_argument('--debug', help='enable debugging mode', action='store_const', const=True, default=False, dest='debug') parser.add_argument('-a', '--address', help='generate payouts to this address (default: <address requested from bitcoind>)', type=str, action='store', default=None, dest='address') parser.add_argument('--datadir', help='store data in this directory (default: <directory run_p2pool.py is in>/data)', type=str, action='store', default=None, dest='datadir') parser.add_argument('--logfile', help='''log to this file (default: data/<NET>/log)''', type=str, action='store', default=None, dest='logfile') parser.add_argument('--merged', help='call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)', type=str, action='append', default=[], dest='merged_urls') parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE', help='donate this percentage of work towards the development of p2pool (default: 1.0)', type=float, action='store', default=0, dest='donation_percentage') parser.add_argument('--iocp', help='use Windows IOCP API in order to avoid errors due to large number of sockets being open', action='store_true', default=False, dest='iocp') parser.add_argument('--irc-announce', help='announce any blocks found on irc://irc.freenode.net/#p2pool', action='store_true', default=False, dest='irc_announce') parser.add_argument('--no-bugreport', help='disable submitting caught exceptions to the author', action='store_true', default=False, dest='no_bugreport') p2pool_group = parser.add_argument_group('p2pool interface') p2pool_group.add_argument('--p2pool-port', metavar='PORT', help='use port PORT to listen for connections (forward this port from your router!) 
(default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='p2pool_port') p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]', help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses', type=str, action='append', default=[], dest='p2pool_nodes') parser.add_argument('--disable-upnp', help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''', action='store_false', default=True, dest='upnp') p2pool_group.add_argument('--max-conns', metavar='CONNS', help='maximum incoming connections (default: 40)', type=int, action='store', default=40, dest='p2pool_conns') p2pool_group.add_argument('--outgoing-conns', metavar='CONNS', help='outgoing connections (default: 6)', type=int, action='store', default=6, dest='p2pool_outgoing_conns') parser.add_argument('--disable-advertise', help='''don't advertise local IP address as being available for incoming connections. useful for running a dark node, along with multiple -n ADDR's and --outgoing-conns 0''', action='store_false', default=False, dest='advertise_ip') worker_group = parser.add_argument_group('worker interface') worker_group.add_argument('-w', '--worker-port', metavar='PORT or ADDR:PORT', help='listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())), type=str, action='store', default=None, dest='worker_endpoint') worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE', help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. 
Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0.5)''', type=float, action='store', default=0.5, dest='worker_fee') worker_group.add_argument('--miner-share-rate', metavar='SHARES_PER_MINUTE', help='number of pseudoshares per minute for each miner', type=float, action='store', default=None, dest='miner_share_rate') worker_group.add_argument('--address-share-rate', metavar='SHARES_PER_MINUTE', help='number of pseudoshares per minute for each address', type=float, action='store', default=None, dest='address_share_rate') worker_group.add_argument('--min-difficulty', metavar='DIFFICULTY', help='minimum difficulty for miners', type=float, action='store', default=1.0, dest='min_difficulty') bitcoind_group = parser.add_argument_group('bitcoind interface') bitcoind_group.add_argument('--bitcoind-config-path', metavar='BITCOIND_CONFIG_PATH', help='custom configuration file path (when bitcoind -conf option used)', type=str, action='store', default=None, dest='bitcoind_config_path') bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS', help='connect to this address (default: 127.0.0.1)', type=str, action='store', default='127.0.0.1', dest='bitcoind_address') bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT', help='''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='bitcoind_rpc_port') bitcoind_group.add_argument('--bitcoind-rpc-ssl', help='connect to JSON-RPC interface using SSL', action='store_true', default=False, dest='bitcoind_rpc_ssl') bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT', help='''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='bitcoind_p2p_port') bitcoind_group.add_argument(metavar='BITCOIND_RPCUSERPASS', help='bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)', type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass') args = parser.parse_args() if args.debug: p2pool.DEBUG = True defer.setDebugging(True) else: p2pool.DEBUG = False net_name = args.net_name + ('_testnet' if args.testnet else '') net = networks.nets[net_name] datadir_path = os.path.join((os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name) if not os.path.exists(datadir_path): os.makedirs(datadir_path) if len(args.bitcoind_rpc_userpass) > 2: parser.error('a maximum of two arguments are allowed') args.bitcoind_rpc_username, args.bitcoind_rpc_password = ([None, None] + args.bitcoind_rpc_userpass)[-2:] if args.bitcoind_rpc_password is None: conf_path = args.bitcoind_config_path or net.PARENT.CONF_FILE_FUNC() if not os.path.exists(conf_path): parser.error('''Bitcoin configuration file not found. Manually enter your RPC password.\r\n''' '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n''' '''\r\n''' '''server=1\r\n''' '''rpcpassword=%x\r\n''' '''\r\n''' '''Keep that password secret! 
After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128))) conf = open(conf_path, 'rb').read() contents = {} for line in conf.splitlines(True): if '#' in line: line = line[:line.index('#')] if '=' not in line: continue k, v = line.split('=', 1) contents[k.strip()] = v.strip() for conf_name, var_name, var_type in [ ('rpcuser', 'bitcoind_rpc_username', str), ('rpcpassword', 'bitcoind_rpc_password', str), ('rpcport', 'bitcoind_rpc_port', int), ('port', 'bitcoind_p2p_port', int), ]: if getattr(args, var_name) is None and conf_name in contents: setattr(args, var_name, var_type(contents[conf_name])) if args.bitcoind_rpc_password is None: parser.error('''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''') if args.bitcoind_rpc_username is None: args.bitcoind_rpc_username = '' if args.bitcoind_rpc_port is None: args.bitcoind_rpc_port = net.PARENT.RPC_PORT if args.bitcoind_p2p_port is None: args.bitcoind_p2p_port = net.PARENT.P2P_PORT if args.p2pool_port is None: args.p2pool_port = net.P2P_PORT if args.p2pool_outgoing_conns > 10: parser.error('''--outgoing-conns can't be more than 10''') if args.worker_endpoint is None: worker_endpoint = '', net.WORKER_PORT elif ':' not in args.worker_endpoint: worker_endpoint = '', int(args.worker_endpoint) else: addr, port = args.worker_endpoint.rsplit(':', 1) worker_endpoint = addr, int(port) if args.address is not None: try: args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net.PARENT) except Exception, e: parser.error('error parsing address: ' + repr(e))
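# run() above falls back to reading RPC credentials from bitcoin.conf when they are not
# given on the command line: anything after '#' is stripped as a comment, each remaining
# 'key=value' line is split on the first '=', and rpcuser/rpcpassword/rpcport/port fill in
# unset arguments. A minimal standalone sketch of just that parsing step
# (parse_bitcoin_conf is an illustrative name, not a p2pool function):
def parse_bitcoin_conf(text):
    contents = {}
    for line in text.splitlines():
        if '#' in line:
            line = line[:line.index('#')]  # drop trailing comments
        if '=' not in line:
            continue
        k, v = line.split('=', 1)
        contents[k.strip()] = v.strip()
    return contents

# Example:
# parse_bitcoin_conf('server=1\nrpcuser=me # local\nrpcpassword=s3cret\n')
# -> {'server': '1', 'rpcuser': 'me', 'rpcpassword': 's3cret'}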
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__, ) print traffic_happened = variable.Event() @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % ( args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) yield factory.getProtocol() # waits until handshake is successful print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % ( url, args.bitcoind_rpc_username) bitcoind = jsonrpc.Proxy( url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) @deferral.retry('Error while checking Bitcoin connection:', 1) @defer.inlineCallbacks def check(): if not (yield net.PARENT.RPC_CHECK(bitcoind)): print >> sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!" raise deferral.RetrySilentlyException() if not net.VERSION_CHECK( (yield bitcoind.rpc_getinfo())['version']): print >> sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!' raise deferral.RetrySilentlyException() yield check() temp_work = yield getwork(bitcoind) if not args.testnet: factory = yield connect_p2p() block_height_var = variable.Variable(None) @defer.inlineCallbacks def poll_height(): block_height_var.set( (yield deferral.retry('Error while calling getblockcount:')( bitcoind.rpc_getblockcount)())) yield poll_height() task.LoopingCall(poll_height).start(60 * 60) bitcoind_warning_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): errors = (yield deferral.retry('Error while calling getmininginfo:')( bitcoind.rpc_getmininginfo)())['errors'] bitcoind_warning_var.set(errors if errors != '' else None) yield poll_warnings() task.LoopingCall(poll_warnings).start(20 * 60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'], ) print ' Current block height: %i' % (block_height_var.value, ) print print 'Determining payout address...' if args.pubkey_hash is None: address_path = os.path.join(datadir_path, 'cached_payout_address') if os.path.exists(address_path): with open(address_path, 'rb') as f: address = f.read().strip('\r\n') print ' Loaded cached address: %s...' % (address, ) else: address = None if address is not None: res = yield deferral.retry( 'Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))() if not res['isvalid'] or not res['ismine']: print ' Cached address is either invalid or not controlled by local bitcoind!' address = None if address is None: print ' Getting payout address from bitcoind...' address = yield deferral.retry( 'Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))() with open(address_path, 'wb') as f: f.write(address) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash( address, net.PARENT) else: my_pubkey_hash = args.pubkey_hash print ' ...success! 
Payout address:', bitcoin_data.pubkey_hash_to_address( my_pubkey_hash, net.PARENT) print my_share_hashes = set() my_doa_share_hashes = set() tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes) shared_share_hashes = set() ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net) known_verified = set() print "Loading shares..." for i, (mode, contents) in enumerate(ss.get_shares()): if mode == 'share': if contents.hash in tracker.items: continue shared_share_hashes.add(contents.hash) contents.time_seen = 0 tracker.add(contents) if len(tracker.items) % 1000 == 0 and tracker.items: print " %i" % (len(tracker.items), ) elif mode == 'verified_hash': known_verified.add(contents) else: raise AssertionError() print " ...inserting %i verified shares..." % ( len(known_verified), ) for h in known_verified: if h not in tracker.items: ss.forget_verified_share(h) continue tracker.verified.add(tracker.items[h]) print " ...done loading %i shares!" % (len(tracker.items), ) print tracker.removed.watch(lambda share: ss.forget_share(share.hash)) tracker.verified.removed.watch( lambda share: ss.forget_verified_share(share.hash)) tracker.removed.watch( lambda share: shared_share_hashes.discard(share.hash)) print 'Initializing work...' # BITCOIND WORK bitcoind_work = variable.Variable((yield getwork(bitcoind))) @defer.inlineCallbacks def work_poller(): while True: flag = factory.new_block.get_deferred() try: bitcoind_work.set( (yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate']))) except: log.err() yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True) work_poller() # PEER WORK best_block_header = variable.Variable(None) def handle_header(new_header): # check that header matches current target if not (net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target): return bitcoind_best_block = bitcoind_work.value['previous_block'] if (best_block_header.value is None or (new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256( bitcoin_data.block_header_type.pack( best_block_header.value)) == bitcoind_best_block ) # new is child of current and previous is current or (bitcoin_data.hash256( bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and best_block_header.value['previous_block'] != bitcoind_best_block) ): # new is current and previous is not a child of current best_block_header.set(new_header) @defer.inlineCallbacks def poll_header(): handle_header((yield factory.conn.value.get_block_header( bitcoind_work.value['previous_block']))) bitcoind_work.changed.watch(lambda _: poll_header()) yield deferral.retry('Error while requesting best block header:')( poll_header)() # BEST SHARE get_height_rel_highest = yield height_tracker.get_height_rel_highest_func( bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net) best_share_var = variable.Variable(None) desired_var = variable.Variable(None) def set_best_share(): best, desired = tracker.think( get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits']) best_share_var.set(best) desired_var.set(desired) bitcoind_work.changed.watch(lambda _: set_best_share()) set_best_share() print ' ...success!' print # setup p2p logic and join p2pool network class Node(p2p.Node): def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' 
% ( len(shares), '%s:%i' % peer.addr if peer is not None else None) new_count = 0 for share in shares: if share.hash in tracker.items: #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),) continue new_count += 1 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None) tracker.add(share) if new_count: set_best_share() if len(shares) > 5: print '... done processing %i shares. New: %i Have: %i/~%i' % ( len(shares), new_count, len( tracker.items), 2 * net.CHAIN_LENGTH) @defer.inlineCallbacks def handle_share_hashes(self, hashes, peer): new_hashes = [x for x in hashes if x not in tracker.items] if not new_hashes: return try: shares = yield peer.get_shares( hashes=new_hashes, parents=0, stops=[], ) except: log.err(None, 'in handle_share_hashes:') else: self.handle_shares(shares, peer) def handle_get_shares(self, hashes, parents, stops, peer): parents = min(parents, 1000 // len(hashes)) stops = set(stops) shares = [] for share_hash in hashes: for share in tracker.get_chain( share_hash, min(parents + 1, tracker.get_height(share_hash))): if share.hash in stops: break shares.append(share) print 'Sending %i shares to %s:%i' % ( len(shares), peer.addr[0], peer.addr[1]) return shares def handle_bestblock(self, header, peer): if net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack( header)) > header['bits'].target: raise p2p.PeerMisbehavingError( 'received block header fails PoW test') handle_header(header) @deferral.retry('Error submitting primary block: (will retry)', 10, 10) def submit_block_p2p(block): if factory.conn.value is None: print >> sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % ( net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256( bitcoin_data.block_header_type.pack(block['header']))) raise deferral.RetrySilentlyException() factory.conn.value.send_block(block=block) @deferral.retry('Error submitting block: (will retry)', 10, 10) @defer.inlineCallbacks def submit_block_rpc(block, ignore_failure): if bitcoind_work.value['use_getblocktemplate']: result = yield bitcoind.rpc_submitblock( bitcoin_data.block_type.pack(block).encode('hex')) success = result is None else: result = yield bitcoind.rpc_getmemorypool( bitcoin_data.block_type.pack(block).encode('hex')) success = result success_expected = net.PARENT.POW_FUNC( bitcoin_data.block_header_type.pack( block['header'])) <= block['header']['bits'].target if (not success and success_expected and not ignore_failure) or (success and not success_expected): print >> sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % ( success, result, success_expected) def submit_block(block, ignore_failure): submit_block_p2p(block) submit_block_rpc(block, ignore_failure) @tracker.verified.added.watch def _(share): if share.pow_hash <= share.header['bits'].target: submit_block(share.as_block(tracker), ignore_failure=True) print print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % ( p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) print def spread(): if (get_height_rel_highest(share.header['previous_block']) > -5 or bitcoind_work.value['previous_block'] in [ share.header['previous_block'], share.header_hash ]): broadcast_share(share.hash) spread() reactor.callLater( 5, spread) # so get_height_rel_highest can update print 'Joining p2pool network using port %i...' 
% (args.p2pool_port, ) @defer.inlineCallbacks def parse(x): if ':' in x: ip, port = x.split(':') defer.returnValue(((yield reactor.resolve(ip)), int(port))) else: defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update( dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >> sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() p2p_node = Node( best_share_hash_func=lambda: best_share_var.value, port=args.p2pool_port, net=net, addr_store=addrs, connect_addrs=connect_addrs, max_incoming_conns=args.p2pool_conns, traffic_happened=traffic_happened, ) p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(p2p_node.addr_store.items())) task.LoopingCall(save_addrs).start(60) @best_block_header.changed.watch def _(header): for peer in p2p_node.peers.itervalues(): peer.send_bestblock(header=header) @defer.inlineCallbacks def broadcast_share(share_hash): shares = [] for share in tracker.get_chain( share_hash, min(5, tracker.get_height(share_hash))): if share.hash in shared_share_hashes: break shared_share_hashes.add(share.hash) shares.append(share) for peer in list(p2p_node.peers.itervalues()): yield peer.sendShares( [share for share in shares if share.peer is not peer]) # send share when the chain changes to their chain best_share_var.changed.watch(broadcast_share) def save_shares(): for share in tracker.get_chain( best_share_var.value, min(tracker.get_height(best_share_var.value), 2 * net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in tracker.verified.items: ss.add_verified_hash(share.hash) task.LoopingCall(save_shares).start(60) @apply @defer.inlineCallbacks def download_shares(): while True: desired = yield desired_var.get_when_satisfies( lambda val: len(val) != 0) peer2, share_hash = random.choice(desired) if len(p2p_node.peers) == 0: yield deferral.sleep(1) continue peer = random.choice(p2p_node.peers.values()) print 'Requesting parent share %s from %s' % ( p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr) try: shares = yield peer.get_shares( hashes=[share_hash], parents=500, stops=[], ) except: log.err(None, 'in download_shares:') continue if not shares: yield deferral.sleep( 1 ) # sleep so we don't keep rerequesting the same share nobody has continue p2p_node.handle_shares(shares, peer) print ' ...success!' print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping( lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1 / 120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' 
% (worker_endpoint[0], worker_endpoint[1]) get_current_txouts = lambda: p2pool_data.get_expected_payouts( tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net) wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var) web_root = web.get_web_root( tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened) worker_interface.WorkerInterface(wb).attach_to( web_root, get_handler=lambda request: request.redirect('/static/')) deferral.retry('Error binding to worker port:', traceback=False)( reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % ( worker_endpoint[1], ) if args.donation_percentage > 0.51: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % ( args.donation_percentage, ) elif args.donation_percentage < 0.49: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % ( args.donation_percentage, ) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % ( args.donation_percentage, ) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal( signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''. join(traceback.format_stack()))) signal.siginterrupt(signal.SIGALRM, False) task.LoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100), ) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if share.pow_hash <= share.header[ 'bits'].target and abs(share.timestamp - time.time()) < 10 * 60: yield deferral.sleep(random.expovariate(1 / 60)) message = '\x02%s BLOCK FOUND by %s! 
%s%064x' % ( net.NAME.upper(), bitcoin_data.script2_to_address( share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash, ) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = tracker.verified.added.watch(new_share) self.recent_messages = [] def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory()) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(3) try: height = tracker.get_height(best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(tracker.verified.items), len(tracker.items), len(p2p_node.peers), sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work'] / dt for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf( sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(2**256 / tracker.items[ best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop( tracker, best_share_var.value, min(60 * 60 // net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second( tracker, best_share_var.value, min(height - 1, 60 * 60 // net.SHARE_PERIOD)) / (1 - stale_prop) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf( stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf( stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x) / (1 - stale_prop)), get_current_txouts().get( bitcoin_data.pubkey_hash_to_script2( my_pubkey_hash), 0) * 1e-8, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100 * stale_prop, math.format_dt( 2**256 / bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings( tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value): print >> sys.stderr, '#' * 40 print >> sys.stderr, '>>> Warning: ' + warning print >> sys.stderr, '#' * 40 if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
def run(): realnets = dict((name, net) for name, net in networks.nets.iteritems() if '_testnet' not in name) parser = fixargparse.FixedArgumentParser( description='p2pool (version %s)' % (p2pool.__version__, ), fromfile_prefix_chars='@') parser.add_argument('--version', action='version', version=p2pool.__version__) parser.add_argument('--net', help='use specified network (default: bitcoin)', action='store', choices=sorted(realnets), default='bitcoin', dest='net_name') parser.add_argument('--testnet', help='''use the network's testnet''', action='store_const', const=True, default=False, dest='testnet') parser.add_argument('--debug', help='enable debugging mode', action='store_const', const=True, default=False, dest='debug') parser.add_argument( '-a', '--address', help= 'generate payouts to this address (default: <address requested from bitcoind>)', type=str, action='store', default=None, dest='address') parser.add_argument( '--datadir', help= 'store data in this directory (default: <directory run_p2pool.py is in>/data)', type=str, action='store', default=None, dest='datadir') parser.add_argument('--logfile', help='''log to this file (default: data/<NET>/log)''', type=str, action='store', default=None, dest='logfile') parser.add_argument( '--merged', help= 'call getauxblock on this url to get work for merged mining (example: http://ncuser:[email protected]:10332/)', type=str, action='append', default=[], dest='merged_urls') parser.add_argument( '--give-author', metavar='DONATION_PERCENTAGE', help= 'donate this percentage of work towards the development of p2pool (default: 0.5)', type=float, action='store', default=0.5, dest='donation_percentage') parser.add_argument( '--iocp', help= 'use Windows IOCP API in order to avoid errors due to large number of sockets being open', action='store_true', default=False, dest='iocp') parser.add_argument( '--irc-announce', help='announce any blocks found on irc://irc.freenode.net/#p2pool', action='store_true', default=False, dest='irc_announce') parser.add_argument( '--no-bugreport', help='disable submitting caught exceptions to the author', action='store_true', default=False, dest='no_bugreport') p2pool_group = parser.add_argument_group('p2pool interface') p2pool_group.add_argument( '--p2pool-port', metavar='PORT', help= 'use port PORT to listen for connections (forward this port from your router!) 
(default: %s)' % ', '.join('%s:%i' % (name, net.P2P_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='p2pool_port') p2pool_group.add_argument( '-n', '--p2pool-node', metavar='ADDR[:PORT]', help= 'connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses', type=str, action='append', default=[], dest='p2pool_nodes') parser.add_argument( '--disable-upnp', help= '''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''', action='store_false', default=True, dest='upnp') p2pool_group.add_argument( '--max-conns', metavar='CONNS', help='maximum incoming connections (default: 40)', type=int, action='store', default=40, dest='p2pool_conns') p2pool_group.add_argument('--outgoing-conns', metavar='CONNS', help='outgoing connections (default: 10)', type=int, action='store', default=10, dest='p2pool_outgoing_conns') worker_group = parser.add_argument_group('worker interface') worker_group.add_argument( '-w', '--worker-port', metavar='PORT or ADDR:PORT', help= 'listen on PORT on interface with ADDR for RPC connections from miners (default: all interfaces, %s)' % ', '.join('%s:%i' % (name, net.WORKER_PORT) for name, net in sorted(realnets.items())), type=str, action='store', default=None, dest='worker_endpoint') worker_group.add_argument( '-f', '--fee', metavar='FEE_PERCENTAGE', help= '''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''', type=float, action='store', default=0, dest='worker_fee') bitcoind_group = parser.add_argument_group('bitcoind interface') bitcoind_group.add_argument( '--bitcoind-address', metavar='BITCOIND_ADDRESS', help='connect to this address (default: 127.0.0.1)', type=str, action='store', default='127.0.0.1', dest='bitcoind_address') bitcoind_group.add_argument( '--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT', help= '''connect to JSON-RPC interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.RPC_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='bitcoind_rpc_port') bitcoind_group.add_argument('--bitcoind-rpc-ssl', help='connect to JSON-RPC interface using SSL', action='store_true', default=False, dest='bitcoind_rpc_ssl') bitcoind_group.add_argument( '--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT', help= '''connect to P2P interface at this port (default: %s <read from bitcoin.conf if password not provided>)''' % ', '.join('%s:%i' % (name, net.PARENT.P2P_PORT) for name, net in sorted(realnets.items())), type=int, action='store', default=None, dest='bitcoind_p2p_port') bitcoind_group.add_argument( metavar='BITCOIND_RPCUSERPASS', help= 'bitcoind RPC interface username, then password, space-separated (only one being provided will cause the username to default to being empty, and none will cause P2Pool to read them from bitcoin.conf)', type=str, action='store', default=[], nargs='*', dest='bitcoind_rpc_userpass') args = parser.parse_args() if args.debug: p2pool.DEBUG = True defer.setDebugging(True) net_name = args.net_name + ('_testnet' if args.testnet else '') net = networks.nets[net_name] datadir_path = os.path.join( (os.path.join(os.path.dirname(sys.argv[0]), 'data') if args.datadir is None else args.datadir), net_name) if not 
os.path.exists(datadir_path): os.makedirs(datadir_path) if len(args.bitcoind_rpc_userpass) > 2: parser.error('a maximum of two arguments are allowed') args.bitcoind_rpc_username, args.bitcoind_rpc_password = ( [None, None] + args.bitcoind_rpc_userpass)[-2:] if args.bitcoind_rpc_password is None: conf_path = net.PARENT.CONF_FILE_FUNC() if not os.path.exists(conf_path): parser.error( '''Bitcoin configuration file not found. Manually enter your RPC password.\r\n''' '''If you actually haven't created a configuration file, you should create one at %s with the text:\r\n''' '''\r\n''' '''server=1\r\n''' '''rpcpassword=%x\r\n''' '''\r\n''' '''Keep that password secret! After creating the file, restart Bitcoin.''' % (conf_path, random.randrange(2**128))) conf = open(conf_path, 'rb').read() contents = {} for line in conf.splitlines(True): if '#' in line: line = line[:line.index('#')] if '=' not in line: continue k, v = line.split('=', 1) contents[k.strip()] = v.strip() for conf_name, var_name, var_type in [ ('rpcuser', 'bitcoind_rpc_username', str), ('rpcpassword', 'bitcoind_rpc_password', str), ('rpcport', 'bitcoind_rpc_port', int), ('port', 'bitcoind_p2p_port', int), ]: if getattr(args, var_name) is None and conf_name in contents: setattr(args, var_name, var_type(contents[conf_name])) if args.bitcoind_rpc_password is None: parser.error( '''Bitcoin configuration file didn't contain an rpcpassword= line! Add one!''' ) if args.bitcoind_rpc_username is None: args.bitcoind_rpc_username = '' if args.bitcoind_rpc_port is None: args.bitcoind_rpc_port = net.PARENT.RPC_PORT if args.bitcoind_p2p_port is None: args.bitcoind_p2p_port = net.PARENT.P2P_PORT if args.p2pool_port is None: args.p2pool_port = net.P2P_PORT if args.p2pool_outgoing_conns > 10: parser.error('''--outgoing-conns can't be more than 10''') if args.worker_endpoint is None: worker_endpoint = '', net.WORKER_PORT elif ':' not in args.worker_endpoint: worker_endpoint = '', int(args.worker_endpoint) else: addr, port = args.worker_endpoint.rsplit(':', 1) worker_endpoint = addr, int(port) if args.address is not None: try: args.pubkey_hash = bitcoin_data.address_to_pubkey_hash( args.address, net.PARENT) except Exception, e: parser.error('error parsing address: ' + repr(e))
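# Both run() variants accept -w/--worker-port either as a bare port or as ADDR:PORT and
# normalize it to an (interface, port) pair, where an empty interface string means
# "listen on all interfaces". A minimal sketch of that normalization
# (parse_worker_endpoint is an illustrative name, not a p2pool function):
def parse_worker_endpoint(value, default_port):
    if value is None:
        return '', default_port
    if ':' not in value:
        return '', int(value)
    addr, port = value.rsplit(':', 1)
    return addr, int(port)

# Examples:
# parse_worker_endpoint(None, 9332)             -> ('', 9332)
# parse_worker_endpoint('9171', 9332)           -> ('', 9171)
# parse_worker_endpoint('127.0.0.1:9332', 9332) -> ('127.0.0.1', 9332)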
def get_user_details(self, username):
    contents = re.split('([+/])', username)
    assert len(contents) % 2 == 1
    user, contents2 = contents[0], contents[1:]

    desired_pseudoshare_target = None
    desired_share_target = None
    for symbol, parameter in zip(contents2[::2], contents2[1::2]):
        if symbol == '+':
            try:
                desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()
        elif symbol == '/':
            try:
                desired_share_target = bitcoin_data.difficulty_to_target(float(parameter))
            except:
                if p2pool.DEBUG:
                    log.err()

    set_adaptive_target = (self.diff_policy == 'F') or ((self.diff_policy == 'A') and (desired_share_target is None))
    set_adaptive_pseudo = (self.diff_policy == 'F') or ((self.diff_policy == 'A') and (desired_pseudoshare_target is None))

    user_rate = None
    pool_rate = None
    if set_adaptive_target: # calculate pool hashrate
        height = self.node.tracker.get_height(self.node.best_share_var.value)
        if height > 5: # we want at least 6 shares in chain
            stale_prop = p2pool_data.get_average_stale_prop(self.node.tracker, self.node.best_share_var.value, min(60*60//self.node.net.SHARE_PERIOD, height))
            pool_rate = p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, min(height - 1, 60*60//self.node.net.SHARE_PERIOD)) / (1 - stale_prop)
    if set_adaptive_pseudo or set_adaptive_target: # calculate user's hashrate
        datums, dt = self.local_rate_monitor.get_datums_in_last()
        npoints = sum(datum['user'] == user for datum in datums)
        if npoints > 5: # at least 6 hashrate datums for the user
            user_rate = 0
            for datum in datums:
                if datum['user'] == user:
                    user_rate += datum['work']/dt

    if set_adaptive_target:
        desired_share_target = None
        if user_rate is not None and pool_rate is not None:
            if user_rate and pool_rate: # min 20 shares per block AND min 20 shares per chain
                desired_share_target = 20 * (max(self.node.bitcoind_work.value['bits'].target * pool_rate, 2**256 // (self.node.net.CHAIN_LENGTH * self.node.net.SHARE_PERIOD)) // user_rate)

    if set_adaptive_pseudo:
        desired_pseudoshare_target = None
        if user_rate is not None:
            if user_rate: # min 20 pseudoshares per 10 minutes
                desired_pseudoshare_target = 20 * (2**256 // user_rate // (10*60))

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
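# The username convention used above lets a miner append difficulty hints to its login:
# '+DIFF' requests a pseudoshare (vardiff) difficulty and '/DIFF' a minimum share
# difficulty, e.g. '1BitcoinAddr/500+16'. A minimal standalone sketch of just the parsing
# step (split_username is an illustrative name; the real code also converts each number
# to a target with bitcoin_data.difficulty_to_target):
import re

def split_username(username):
    parts = re.split('([+/])', username)
    user, rest = parts[0], parts[1:]
    pseudoshare_difficulty = None
    share_difficulty = None
    for symbol, parameter in zip(rest[::2], rest[1::2]):
        try:
            value = float(parameter)
        except ValueError:
            continue  # ignore malformed suffixes (the original optionally logs them in DEBUG mode)
        if symbol == '+':
            pseudoshare_difficulty = value
        elif symbol == '/':
            share_difficulty = value
    return user, pseudoshare_difficulty, share_difficulty

# Example:
# split_username('1BitcoinAddr/500+16') -> ('1BitcoinAddr', 16.0, 500.0)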
def main(args, net, datadir_path, merged_urls, worker_endpoint): try: print 'p2pool (version %s)' % (p2pool.__version__,) print traffic_happened = variable.Event() @defer.inlineCallbacks def connect_p2p(): # connect to bitcoind over bitcoin-p2p print '''Testing bitcoind P2P connection to '%s:%s'...''' % (args.bitcoind_address, args.bitcoind_p2p_port) factory = bitcoin_p2p.ClientFactory(net.PARENT) reactor.connectTCP(args.bitcoind_address, args.bitcoind_p2p_port, factory) yield factory.getProtocol() # waits until handshake is successful print ' ...success!' print defer.returnValue(factory) if args.testnet: # establish p2p connection first if testnet so bitcoind can work without connections factory = yield connect_p2p() # connect to bitcoind over JSON-RPC and do initial getmemorypool url = '%s://%s:%i/' % ('https' if args.bitcoind_rpc_ssl else 'http', args.bitcoind_address, args.bitcoind_rpc_port) print '''Testing bitcoind RPC connection to '%s' with username '%s'...''' % (url, args.bitcoind_rpc_username) bitcoind = jsonrpc.Proxy(url, dict(Authorization='Basic ' + base64.b64encode(args.bitcoind_rpc_username + ':' + args.bitcoind_rpc_password)), timeout=30) @deferral.retry('Error while checking Bitcoin connection:', 1) @defer.inlineCallbacks def check(): if not (yield net.PARENT.RPC_CHECK(bitcoind)): print >>sys.stderr, " Check failed! Make sure that you're connected to the right bitcoind with --bitcoind-rpc-port!" raise deferral.RetrySilentlyException() if not net.VERSION_CHECK((yield bitcoind.rpc_getinfo())['version']): print >>sys.stderr, ' Bitcoin version too old! Upgrade to 0.6.4 or newer!' raise deferral.RetrySilentlyException() yield check() temp_work = yield getwork(bitcoind) if not args.testnet: factory = yield connect_p2p() block_height_var = variable.Variable(None) @defer.inlineCallbacks def poll_height(): block_height_var.set((yield deferral.retry('Error while calling getblockcount:')(bitcoind.rpc_getblockcount)())) yield poll_height() task.LoopingCall(poll_height).start(60*60) bitcoind_warning_var = variable.Variable(None) @defer.inlineCallbacks def poll_warnings(): errors = (yield deferral.retry('Error while calling getmininginfo:')(bitcoind.rpc_getmininginfo)())['errors'] bitcoind_warning_var.set(errors if errors != '' else None) yield poll_warnings() task.LoopingCall(poll_warnings).start(20*60) print ' ...success!' print ' Current block hash: %x' % (temp_work['previous_block'],) print ' Current block height: %i' % (block_height_var.value,) print print 'Determining payout address...' if args.pubkey_hash is None: address_path = os.path.join(datadir_path, 'cached_payout_address') if os.path.exists(address_path): with open(address_path, 'rb') as f: address = f.read().strip('\r\n') print ' Loaded cached address: %s...' % (address,) else: address = None if address is not None: res = yield deferral.retry('Error validating cached address:', 5)(lambda: bitcoind.rpc_validateaddress(address))() if not res['isvalid'] or not res['ismine']: print ' Cached address is either invalid or not controlled by local bitcoind!' address = None if address is None: print ' Getting payout address from bitcoind...' address = yield deferral.retry('Error getting payout address from bitcoind:', 5)(lambda: bitcoind.rpc_getaccountaddress('p2pool'))() with open(address_path, 'wb') as f: f.write(address) my_pubkey_hash = bitcoin_data.address_to_pubkey_hash(address, net.PARENT) else: my_pubkey_hash = args.pubkey_hash print ' ...success! 
Payout address:', bitcoin_data.pubkey_hash_to_address(my_pubkey_hash, net.PARENT) print my_share_hashes = set() my_doa_share_hashes = set() tracker = p2pool_data.OkayTracker(net, my_share_hashes, my_doa_share_hashes) shared_share_hashes = set() ss = p2pool_data.ShareStore(os.path.join(datadir_path, 'shares.'), net) known_verified = set() print "Loading shares..." for i, (mode, contents) in enumerate(ss.get_shares()): if mode == 'share': if contents.hash in tracker.items: continue shared_share_hashes.add(contents.hash) contents.time_seen = 0 tracker.add(contents) if len(tracker.items) % 1000 == 0 and tracker.items: print " %i" % (len(tracker.items),) elif mode == 'verified_hash': known_verified.add(contents) else: raise AssertionError() print " ...inserting %i verified shares..." % (len(known_verified),) for h in known_verified: if h not in tracker.items: ss.forget_verified_share(h) continue tracker.verified.add(tracker.items[h]) print " ...done loading %i shares!" % (len(tracker.items),) print tracker.removed.watch(lambda share: ss.forget_share(share.hash)) tracker.verified.removed.watch(lambda share: ss.forget_verified_share(share.hash)) tracker.removed.watch(lambda share: shared_share_hashes.discard(share.hash)) print 'Initializing work...' # BITCOIND WORK bitcoind_work = variable.Variable((yield getwork(bitcoind))) @defer.inlineCallbacks def work_poller(): while True: flag = factory.new_block.get_deferred() try: bitcoind_work.set((yield getwork(bitcoind, bitcoind_work.value['use_getblocktemplate']))) except: log.err() yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True) work_poller() # PEER WORK best_block_header = variable.Variable(None) def handle_header(new_header): # check that header matches current target if not (net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= bitcoind_work.value['bits'].target): return bitcoind_best_block = bitcoind_work.value['previous_block'] if (best_block_header.value is None or ( new_header['previous_block'] == bitcoind_best_block and bitcoin_data.hash256(bitcoin_data.block_header_type.pack(best_block_header.value)) == bitcoind_best_block ) # new is child of current and previous is current or ( bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and best_block_header.value['previous_block'] != bitcoind_best_block )): # new is current and previous is not a child of current best_block_header.set(new_header) @defer.inlineCallbacks def poll_header(): handle_header((yield factory.conn.value.get_block_header(bitcoind_work.value['previous_block']))) bitcoind_work.changed.watch(lambda _: poll_header()) yield deferral.retry('Error while requesting best block header:')(poll_header)() # BEST SHARE get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(bitcoind, factory, lambda: bitcoind_work.value['previous_block'], net) best_share_var = variable.Variable(None) desired_var = variable.Variable(None) def set_best_share(): best, desired = tracker.think(get_height_rel_highest, bitcoind_work.value['previous_block'], bitcoind_work.value['bits']) best_share_var.set(best) desired_var.set(desired) bitcoind_work.changed.watch(lambda _: set_best_share()) set_best_share() print ' ...success!' print # setup p2p logic and join p2pool network class Node(p2p.Node): def handle_shares(self, shares, peer): if len(shares) > 5: print 'Processing %i shares from %s...' 
% (len(shares), '%s:%i' % peer.addr if peer is not None else None) new_count = 0 for share in shares: if share.hash in tracker.items: #print 'Got duplicate share, ignoring. Hash: %s' % (p2pool_data.format_hash(share.hash),) continue new_count += 1 #print 'Received share %s from %r' % (p2pool_data.format_hash(share.hash), share.peer.addr if share.peer is not None else None) tracker.add(share) if new_count: set_best_share() if len(shares) > 5: print '... done processing %i shares. New: %i Have: %i/~%i' % (len(shares), new_count, len(tracker.items), 2*net.CHAIN_LENGTH) @defer.inlineCallbacks def handle_share_hashes(self, hashes, peer): new_hashes = [x for x in hashes if x not in tracker.items] if not new_hashes: return try: shares = yield peer.get_shares( hashes=new_hashes, parents=0, stops=[], ) except: log.err(None, 'in handle_share_hashes:') else: self.handle_shares(shares, peer) def handle_get_shares(self, hashes, parents, stops, peer): parents = min(parents, 1000//len(hashes)) stops = set(stops) shares = [] for share_hash in hashes: for share in tracker.get_chain(share_hash, min(parents + 1, tracker.get_height(share_hash))): if share.hash in stops: break shares.append(share) print 'Sending %i shares to %s:%i' % (len(shares), peer.addr[0], peer.addr[1]) return shares def handle_bestblock(self, header, peer): if net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(header)) > header['bits'].target: raise p2p.PeerMisbehavingError('received block header fails PoW test') handle_header(header) @deferral.retry('Error submitting primary block: (will retry)', 10, 10) def submit_block_p2p(block): if factory.conn.value is None: print >>sys.stderr, 'No bitcoind connection when block submittal attempted! %s%064x' % (net.PARENT.BLOCK_EXPLORER_URL_PREFIX, bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))) raise deferral.RetrySilentlyException() factory.conn.value.send_block(block=block) @deferral.retry('Error submitting block: (will retry)', 10, 10) @defer.inlineCallbacks def submit_block_rpc(block, ignore_failure): if bitcoind_work.value['use_getblocktemplate']: result = yield bitcoind.rpc_submitblock(bitcoin_data.block_type.pack(block).encode('hex')) success = result is None else: result = yield bitcoind.rpc_getmemorypool(bitcoin_data.block_type.pack(block).encode('hex')) success = result success_expected = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(block['header'])) <= block['header']['bits'].target if (not success and success_expected and not ignore_failure) or (success and not success_expected): print >>sys.stderr, 'Block submittal result: %s (%r) Expected: %s' % (success, result, success_expected) def submit_block(block, ignore_failure): submit_block_p2p(block) submit_block_rpc(block, ignore_failure) @tracker.verified.added.watch def _(share): if share.pow_hash <= share.header['bits'].target: submit_block(share.as_block(tracker), ignore_failure=True) print print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) print def spread(): if (get_height_rel_highest(share.header['previous_block']) > -5 or bitcoind_work.value['previous_block'] in [share.header['previous_block'], share.header_hash]): broadcast_share(share.hash) spread() reactor.callLater(5, spread) # so get_height_rel_highest can update print 'Joining p2pool network using port %i...' 
% (args.p2pool_port,) @defer.inlineCallbacks def parse(x): if ':' in x: ip, port = x.split(':') defer.returnValue(((yield reactor.resolve(ip)), int(port))) else: defer.returnValue(((yield reactor.resolve(x)), net.P2P_PORT)) addrs = {} if os.path.exists(os.path.join(datadir_path, 'addrs')): try: with open(os.path.join(datadir_path, 'addrs'), 'rb') as f: addrs.update(dict((tuple(k), v) for k, v in json.loads(f.read()))) except: print >>sys.stderr, 'error parsing addrs' for addr_df in map(parse, net.BOOTSTRAP_ADDRS): try: addr = yield addr_df if addr not in addrs: addrs[addr] = (0, time.time(), time.time()) except: log.err() connect_addrs = set() for addr_df in map(parse, args.p2pool_nodes): try: connect_addrs.add((yield addr_df)) except: log.err() p2p_node = Node( best_share_hash_func=lambda: best_share_var.value, port=args.p2pool_port, net=net, addr_store=addrs, connect_addrs=connect_addrs, max_incoming_conns=args.p2pool_conns, traffic_happened=traffic_happened, ) p2p_node.start() def save_addrs(): with open(os.path.join(datadir_path, 'addrs'), 'wb') as f: f.write(json.dumps(p2p_node.addr_store.items())) task.LoopingCall(save_addrs).start(60) @best_block_header.changed.watch def _(header): for peer in p2p_node.peers.itervalues(): peer.send_bestblock(header=header) @defer.inlineCallbacks def broadcast_share(share_hash): shares = [] for share in tracker.get_chain(share_hash, min(5, tracker.get_height(share_hash))): if share.hash in shared_share_hashes: break shared_share_hashes.add(share.hash) shares.append(share) for peer in list(p2p_node.peers.itervalues()): yield peer.sendShares([share for share in shares if share.peer is not peer]) # send share when the chain changes to their chain best_share_var.changed.watch(broadcast_share) def save_shares(): for share in tracker.get_chain(best_share_var.value, min(tracker.get_height(best_share_var.value), 2*net.CHAIN_LENGTH)): ss.add_share(share) if share.hash in tracker.verified.items: ss.add_verified_hash(share.hash) task.LoopingCall(save_shares).start(60) @apply @defer.inlineCallbacks def download_shares(): while True: desired = yield desired_var.get_when_satisfies(lambda val: len(val) != 0) peer2, share_hash = random.choice(desired) if len(p2p_node.peers) == 0: yield deferral.sleep(1) continue peer = random.choice(p2p_node.peers.values()) print 'Requesting parent share %s from %s' % (p2pool_data.format_hash(share_hash), '%s:%i' % peer.addr) try: shares = yield peer.get_shares( hashes=[share_hash], parents=500, stops=[], ) except: log.err(None, 'in download_shares:') continue if not shares: yield deferral.sleep(1) # sleep so we don't keep rerequesting the same share nobody has continue p2p_node.handle_shares(shares, peer) print ' ...success!' print if args.upnp: @defer.inlineCallbacks def upnp_thread(): while True: try: is_lan, lan_ip = yield ipdiscover.get_local_ip() if is_lan: pm = yield portmapper.get_port_mapper() yield pm._upnp.add_port_mapping(lan_ip, args.p2pool_port, args.p2pool_port, 'p2pool', 'TCP') except defer.TimeoutError: pass except: if p2pool.DEBUG: log.err(None, 'UPnP error:') yield deferral.sleep(random.expovariate(1/120)) upnp_thread() # start listening for workers with a JSON-RPC server print 'Listening for workers on %r port %i...' 
% (worker_endpoint[0], worker_endpoint[1]) get_current_txouts = lambda: p2pool_data.get_expected_payouts(tracker, best_share_var.value, bitcoind_work.value['bits'].target, bitcoind_work.value['subsidy'], net) wb = work.WorkerBridge(my_pubkey_hash, net, args.donation_percentage, bitcoind_work, best_block_header, merged_urls, best_share_var, tracker, my_share_hashes, my_doa_share_hashes, args.worker_fee, p2p_node, submit_block, set_best_share, broadcast_share, block_height_var) web_root = web.get_web_root(tracker, bitcoind_work, get_current_txouts, datadir_path, net, wb.get_stale_counts, my_pubkey_hash, wb.local_rate_monitor, args.worker_fee, p2p_node, wb.my_share_hashes, wb.pseudoshare_received, wb.share_received, best_share_var, bitcoind_warning_var, traffic_happened) worker_interface.WorkerInterface(wb).attach_to(web_root, get_handler=lambda request: request.redirect('/static/')) deferral.retry('Error binding to worker port:', traceback=False)(reactor.listenTCP)(worker_endpoint[1], server.Site(web_root), interface=worker_endpoint[0]) with open(os.path.join(os.path.join(datadir_path, 'ready_flag')), 'wb') as f: pass print ' ...success!' print # done! print 'Started successfully!' print 'Go to http://127.0.0.1:%i/ to view graphs and statistics!' % (worker_endpoint[1],) if args.donation_percentage > 0.51: print '''Donating %.1f%% of work towards P2Pool's development. Thanks for the tip!''' % (args.donation_percentage,) elif args.donation_percentage < 0.49: print '''Donating %.1f%% of work towards P2Pool's development. Please donate to encourage further development of P2Pool!''' % (args.donation_percentage,) else: print '''Donating %.1f%% of work towards P2Pool's development. Thank you!''' % (args.donation_percentage,) print 'You can increase this amount with --give-author argument! (or decrease it, if you must)' print if hasattr(signal, 'SIGALRM'): signal.signal(signal.SIGALRM, lambda signum, frame: reactor.callFromThread( sys.stderr.write, 'Watchdog timer went off at:\n' + ''.join(traceback.format_stack()) )) signal.siginterrupt(signal.SIGALRM, False) task.LoopingCall(signal.alarm, 30).start(1) if args.irc_announce: from twisted.words.protocols import irc class IRCClient(irc.IRCClient): nickname = 'p2pool%02i' % (random.randrange(100),) channel = net.ANNOUNCE_CHANNEL def lineReceived(self, line): if p2pool.DEBUG: print repr(line) irc.IRCClient.lineReceived(self, line) def signedOn(self): irc.IRCClient.signedOn(self) self.factory.resetDelay() self.join(self.channel) @defer.inlineCallbacks def new_share(share): if share.pow_hash <= share.header['bits'].target and abs(share.timestamp - time.time()) < 10*60: yield deferral.sleep(random.expovariate(1/60)) message = '\x02%s BLOCK FOUND by %s! 
%s%064x' % (net.NAME.upper(), bitcoin_data.script2_to_address(share.new_script, net.PARENT), net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash) if all('%x' % (share.header_hash,) not in old_message for old_message in self.recent_messages): self.say(self.channel, message) self._remember_message(message) self.watch_id = tracker.verified.added.watch(new_share) self.recent_messages = [] def _remember_message(self, message): self.recent_messages.append(message) while len(self.recent_messages) > 100: self.recent_messages.pop(0) def privmsg(self, user, channel, message): if channel == self.channel: self._remember_message(message) def connectionLost(self, reason): tracker.verified.added.unwatch(self.watch_id) print 'IRC connection lost:', reason.getErrorMessage() class IRCClientFactory(protocol.ReconnectingClientFactory): protocol = IRCClient reactor.connectTCP("irc.freenode.net", 6667, IRCClientFactory()) @defer.inlineCallbacks def status_thread(): last_str = None last_time = 0 while True: yield deferral.sleep(3) try: height = tracker.get_height(best_share_var.value) this_str = 'P2Pool: %i shares in chain (%i verified/%i total) Peers: %i (%i incoming)' % ( height, len(tracker.verified.items), len(tracker.items), len(p2p_node.peers), sum(1 for peer in p2p_node.peers.itervalues() if peer.incoming), ) + (' FDs: %i R/%i W' % (len(reactor.getReaders()), len(reactor.getWriters())) if p2pool.DEBUG else '') datums, dt = wb.local_rate_monitor.get_datums_in_last() my_att_s = sum(datum['work']/dt for datum in datums) this_str += '\n Local: %sH/s in last %s Local dead on arrival: %s Expected time to share: %s' % ( math.format(int(my_att_s)), math.format_dt(dt), math.format_binomial_conf(sum(1 for datum in datums if datum['dead']), len(datums), 0.95), math.format_dt(2**256 / tracker.items[best_share_var.value].max_target / my_att_s) if my_att_s and best_share_var.value else '???', ) if height > 2: (stale_orphan_shares, stale_doa_shares), shares, _ = wb.get_stale_counts() stale_prop = p2pool_data.get_average_stale_prop(tracker, best_share_var.value, min(60*60//net.SHARE_PERIOD, height)) real_att_s = p2pool_data.get_pool_attempts_per_second(tracker, best_share_var.value, min(height - 1, 60*60//net.SHARE_PERIOD)) / (1 - stale_prop) this_str += '\n Shares: %i (%i orphan, %i dead) Stale rate: %s Efficiency: %s Current payout: %.4f %s' % ( shares, stale_orphan_shares, stale_doa_shares, math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95), math.format_binomial_conf(stale_orphan_shares + stale_doa_shares, shares, 0.95, lambda x: (1 - x)/(1 - stale_prop)), get_current_txouts().get(bitcoin_data.pubkey_hash_to_script2(my_pubkey_hash), 0)*1e-8, net.PARENT.SYMBOL, ) this_str += '\n Pool: %sH/s Stale rate: %.1f%% Expected time to block: %s' % ( math.format(int(real_att_s)), 100*stale_prop, math.format_dt(2**256 / bitcoind_work.value['bits'].target / real_att_s), ) for warning in p2pool_data.get_warnings(tracker, best_share_var.value, net, bitcoind_warning_var.value, bitcoind_work.value): print >>sys.stderr, '#'*40 print >>sys.stderr, '>>> Warning: ' + warning print >>sys.stderr, '#'*40 if this_str != last_str or time.time() > last_time + 15: print this_str last_str = this_str last_time = time.time() except: log.err() status_thread() except: reactor.stop() log.err(None, 'Fatal error:')
def get_user_details(self, user):
    desired_pseudoshare_target = None
    if '+' in user:
        user, desired_pseudoshare_difficulty_str = user.rsplit('+', 1)
        try:
            desired_pseudoshare_target = bitcoin_data.difficulty_to_target(float(desired_pseudoshare_difficulty_str))
        except:
            pass

    desired_share_target = None
    if '/' in user:
        user, min_diff_str = user.rsplit('/', 1)
        try:
            desired_share_target = bitcoin_data.difficulty_to_target(float(min_diff_str))
        except:
            pass

    # diff_policy 'F' forces adaptive difficulty; 'D' uses it only where the miner did not request one
    set_adaptive_target = (self.diff_policy == 'F') or ((self.diff_policy == 'D') and (desired_share_target is None))
    set_adaptive_pseudo = (self.diff_policy == 'F') or ((self.diff_policy == 'D') and (desired_pseudoshare_target is None))
    user_rate = None
    pool_rate = None

    if set_adaptive_target: # calculate pool hashrate
        height = self.node.tracker.get_height(self.node.best_share_var.value)
        if height > 5: # we want at least 6 shares in chain
            stale_prop = p2pool_data.get_average_stale_prop(self.node.tracker, self.node.best_share_var.value, min(60*60//self.node.net.SHARE_PERIOD, height))
            pool_rate = p2pool_data.get_pool_attempts_per_second(self.node.tracker, self.node.best_share_var.value, min(height - 1, 60*60//self.node.net.SHARE_PERIOD)) / (1 - stale_prop)

    if set_adaptive_pseudo or set_adaptive_target: # calculate user's hashrate
        datums, dt = self.local_rate_monitor.get_datums_in_last()
        npoints = sum(datum['user'] == user for datum in datums)
        if npoints > 5: # at least 6 hashrate datums for the user
            user_rate = 0
            for datum in datums:
                if datum['user'] == user:
                    user_rate += datum['work']/dt

    if set_adaptive_target:
        desired_share_target = None
        if user_rate is not None and pool_rate is not None:
            if user_rate and pool_rate:
                # min 20 shares per block AND min 20 shares per chain
                desired_share_target = 20 * (max(self.node.bitcoind_work.value['bits'].target * pool_rate, 2**256 // (self.node.net.CHAIN_LENGTH * self.node.net.SHARE_PERIOD)) // user_rate)

    if set_adaptive_pseudo:
        desired_pseudoshare_target = None
        if user_rate is not None:
            if user_rate:
                # min 20 pseudoshares per 10 minutes
                desired_pseudoshare_target = 20 * (2**256 // user_rate // (10*60))

    if desired_share_target is None:
        desired_share_target = 2**256 - 1

    if random.uniform(0, 100) < self.worker_fee:
        pubkey_hash = self.my_pubkey_hash
    else:
        try:
            pubkey_hash = bitcoin_data.address_to_pubkey_hash(user, self.node.net.PARENT)
        except: # XXX blah
            pubkey_hash = self.my_pubkey_hash

    return user, pubkey_hash, desired_share_target, desired_pseudoshare_target
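# Illustrative sketch (not part of p2pool): how the worker-name suffixes parsed
# above behave. A miner username of the form ADDRESS/SHARE_DIFF+PSEUDO_DIFF is
# split with rsplit, '+' first and then '/', and both suffixes are optional.
# `parse_suffixes` is a hypothetical stand-alone mirror of that parsing; it
# returns the bare username plus the two difficulty strings (or None).
def parse_suffixes(user):
    pseudo_diff = None
    share_diff = None
    if '+' in user:
        user, pseudo_diff = user.rsplit('+', 1)
    if '/' in user:
        user, share_diff = user.rsplit('/', 1)
    return user, share_diff, pseudo_diff

if __name__ == '__main__':
    # '1ExampleAddr/1000+500' -> ('1ExampleAddr', '1000', '500')
    print(parse_suffixes('1ExampleAddr/1000+500'))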
def run():
    class FixedArgumentParser(argparse.ArgumentParser):
        def _read_args_from_files(self, arg_strings):
            # expand arguments referencing files
            new_arg_strings = []
            for arg_string in arg_strings:
                # for regular arguments, just add them back into the list
                if not arg_string or arg_string[0] not in self.fromfile_prefix_chars:
                    new_arg_strings.append(arg_string)
                # replace arguments referencing files with the file content
                else:
                    try:
                        args_file = open(arg_string[1:])
                        try:
                            arg_strings = []
                            for arg_line in args_file.read().splitlines():
                                for arg in self.convert_arg_line_to_args(arg_line):
                                    arg_strings.append(arg)
                            arg_strings = self._read_args_from_files(arg_strings)
                            new_arg_strings.extend(arg_strings)
                        finally:
                            args_file.close()
                    except IOError:
                        err = sys.exc_info()[1]
                        self.error(str(err))
            # return the modified argument list
            return new_arg_strings

        def convert_arg_line_to_args(self, arg_line):
            return [arg for arg in arg_line.split() if arg.strip()]

    parser = FixedArgumentParser(description='p2pool (version %s)' % (p2pool.__version__,), fromfile_prefix_chars='@')
    parser.add_argument('--version', action='version', version=p2pool.__version__)
    parser.add_argument('--net',
        help='use specified network (default: bitcoin)',
        action='store', choices=sorted(networks.realnets), default='bitcoin', dest='net_name')
    parser.add_argument('--testnet',
        help='''use the network's testnet''',
        action='store_const', const=True, default=False, dest='testnet')
    parser.add_argument('--debug',
        help='enable debugging mode',
        action='store_const', const=True, default=False, dest='debug')
    parser.add_argument('-a', '--address',
        help='generate payouts to this address (default: <address requested from bitcoind>)',
        type=str, action='store', default=None, dest='address')
    parser.add_argument('--logfile',
        help='''log to this file (default: data/<NET>/log)''',
        type=str, action='store', default=None, dest='logfile')
    parser.add_argument('--merged-url',
        help='call getauxblock on this url to get work for merged mining (example: http://127.0.0.1:10332/)',
        type=str, action='store', default=None, dest='merged_url')
    parser.add_argument('--merged-userpass',
        help='use this user and password when requesting merged mining work (example: ncuser:ncpass)',
        type=str, action='store', default=None, dest='merged_userpass')
    parser.add_argument('--give-author', metavar='DONATION_PERCENTAGE',
        help='donate this percentage of work to author of p2pool (default: 0.5)',
        type=float, action='store', default=0.5, dest='donation_percentage')

    p2pool_group = parser.add_argument_group('p2pool interface')
    p2pool_group.add_argument('--p2pool-port', metavar='PORT',
        help='use port PORT to listen for connections (forward this port from your router!) (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='p2pool_port')
    p2pool_group.add_argument('-n', '--p2pool-node', metavar='ADDR[:PORT]',
        help='connect to existing p2pool node at ADDR listening on port PORT (defaults to default p2pool P2P port) in addition to builtin addresses',
        type=str, action='append', default=[], dest='p2pool_nodes')
    parser.add_argument('--disable-upnp',
        help='''don't attempt to use UPnP to forward p2pool's P2P port from the Internet to this computer''',
        action='store_false', default=True, dest='upnp')

    worker_group = parser.add_argument_group('worker interface')
    worker_group.add_argument('-w', '--worker-port', metavar='PORT',
        help='listen on PORT for RPC connections from miners (default: %s)' % ', '.join('%s:%i' % (n.NAME, n.WORKER_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='worker_port')
    worker_group.add_argument('-f', '--fee', metavar='FEE_PERCENTAGE',
        help='''charge workers mining to their own bitcoin address (by setting their miner's username to a bitcoin address) this percentage fee to mine on your p2pool instance. Amount displayed at http://127.0.0.1:WORKER_PORT/fee (default: 0)''',
        type=float, action='store', default=0, dest='worker_fee')

    bitcoind_group = parser.add_argument_group('bitcoind interface')
    bitcoind_group.add_argument('--bitcoind-address', metavar='BITCOIND_ADDRESS',
        help='connect to this address (default: 127.0.0.1)',
        type=str, action='store', default='127.0.0.1', dest='bitcoind_address')
    bitcoind_group.add_argument('--bitcoind-rpc-port', metavar='BITCOIND_RPC_PORT',
        help='''connect to JSON-RPC interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_RPC_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_rpc_port')
    bitcoind_group.add_argument('--bitcoind-p2p-port', metavar='BITCOIND_P2P_PORT',
        help='''connect to P2P interface at this port (default: %s)''' % ', '.join('%s:%i' % (n.NAME, n.BITCOIN_P2P_PORT) for _, n in sorted(networks.realnets.items())),
        type=int, action='store', default=None, dest='bitcoind_p2p_port')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCUSER',
        help='bitcoind RPC interface username (default: <empty>)',
        type=str, action='store', default='', nargs='?', dest='bitcoind_rpc_username')
    bitcoind_group.add_argument(metavar='BITCOIND_RPCPASSWORD',
        help='bitcoind RPC interface password',
        type=str, action='store', dest='bitcoind_rpc_password')

    args = parser.parse_args()

    if args.debug:
        p2pool.DEBUG = True

    net = networks.nets[args.net_name + ('_testnet' if args.testnet else '')]

    datadir_path = os.path.join(os.path.dirname(sys.argv[0]), 'data', net.NAME)
    if not os.path.exists(datadir_path):
        os.makedirs(datadir_path)

    if args.logfile is None:
        args.logfile = os.path.join(datadir_path, 'log')

    class EncodeReplacerPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.softspace = 0
        def write(self, data):
            if isinstance(data, unicode):
                data = data.encode(self.inner_file.encoding, 'replace')
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class LogFile(object):
        def __init__(self, filename):
            self.filename = filename
            self.inner_file = None
            self.reopen()
        def reopen(self):
            if self.inner_file is not None:
                self.inner_file.close()
            open(self.filename, 'a').close()
            f = open(self.filename, 'rb')
            f.seek(0, os.SEEK_END)
            length = f.tell()
            if length > 100*1000*1000:
                f.seek(-1000*1000, os.SEEK_END)
                while True:
                    if f.read(1) in ('', '\n'):
                        break
                data = f.read()
                f.close()
                f = open(self.filename, 'wb')
                f.write(data)
            f.close()
            self.inner_file = codecs.open(self.filename, 'a', 'utf-8')
        def write(self, data):
            self.inner_file.write(data)
        def flush(self):
            self.inner_file.flush()
    class TeePipe(object):
        def __init__(self, outputs):
            self.outputs = outputs
        def write(self, data):
            for output in self.outputs:
                output.write(data)
        def flush(self):
            for output in self.outputs:
                output.flush()
    class TimestampingPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.buf = ''
            self.softspace = 0
        def write(self, data):
            buf = self.buf + data
            lines = buf.split('\n')
            for line in lines[:-1]:
                self.inner_file.write('%s %s\n' % (datetime.datetime.now().strftime("%H:%M:%S.%f"), line))
                self.inner_file.flush()
            self.buf = lines[-1]
        def flush(self):
            pass
    class AbortPipe(object):
        def __init__(self, inner_file):
            self.inner_file = inner_file
            self.softspace = 0
        def write(self, data):
            try:
                self.inner_file.write(data)
            except:
                sys.stdout = sys.__stdout__
                log.DefaultObserver.stderr = sys.stderr = sys.__stderr__
                raise
        def flush(self):
            self.inner_file.flush()

    logfile = LogFile(args.logfile)
    sys.stdout = sys.stderr = log.DefaultObserver.stderr = AbortPipe(TimestampingPipe(TeePipe([EncodeReplacerPipe(sys.stderr), logfile])))
    if hasattr(signal, "SIGUSR1"):
        def sigusr1(signum, frame):
            print 'Caught SIGUSR1, closing %r...' % (args.logfile,)
            logfile.reopen()
            print '...and reopened %r after catching SIGUSR1.' % (args.logfile,)
        signal.signal(signal.SIGUSR1, sigusr1)
    task.LoopingCall(logfile.reopen).start(5)

    if args.bitcoind_rpc_port is None:
        args.bitcoind_rpc_port = net.BITCOIN_RPC_PORT

    if args.bitcoind_p2p_port is None:
        args.bitcoind_p2p_port = net.BITCOIN_P2P_PORT

    if args.p2pool_port is None:
        args.p2pool_port = net.P2P_PORT

    if args.worker_port is None:
        args.worker_port = net.WORKER_PORT

    if args.address is not None:
        try:
            args.pubkey_hash = bitcoin_data.address_to_pubkey_hash(args.address, net)
        except Exception, e:
            parser.error('error parsing address: ' + repr(e))
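# Illustrative sketch (not part of p2pool): what fromfile_prefix_chars='@' buys.
# FixedArgumentParser above overrides convert_arg_line_to_args so that a file
# passed as @FILE may hold several space-separated options per line, e.g.
#
#     $ cat my.conf
#     --net bitcoin --give-author 1.0
#     -w 9332
#     $ python run_p2pool.py @my.conf      # illustrative command line
#
# A minimal self-contained demo of the same idea (the option names here are
# hypothetical, not p2pool's full set):
import argparse

class MultiArgLineParser(argparse.ArgumentParser):
    def convert_arg_line_to_args(self, arg_line):
        # stock argparse treats each line of an @-file as ONE argument;
        # splitting on whitespace allows "--opt value" pairs per line
        return [arg for arg in arg_line.split() if arg.strip()]

if __name__ == '__main__':
    demo = MultiArgLineParser(fromfile_prefix_chars='@')
    demo.add_argument('--net', default='bitcoin')
    demo.add_argument('-w', '--worker-port', type=int, default=None)
    print(demo.parse_args(['--net', 'bitcoin', '-w', '9332']))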
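# Illustrative sketch (not part of p2pool): LogFile.reopen above bounds the log
# by rewriting it with roughly its last megabyte once it grows past 100 MB,
# skipping the partial line at the cut point. `truncate_to_tail` is a
# hypothetical stand-alone version of that idea.
import os

def truncate_to_tail(filename, max_bytes=100*1000*1000, keep_bytes=1000*1000):
    f = open(filename, 'rb')
    f.seek(0, os.SEEK_END)
    if f.tell() > max_bytes:
        f.seek(-keep_bytes, os.SEEK_END)
        # advance to the next newline so the kept tail starts on a full line
        while True:
            if f.read(1) in ('', '\n'):
                break
        data = f.read()
        f.close()
        f = open(filename, 'wb')
        f.write(data)
    f.close()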