def __init__(self, factory, bitcoind, shares, known_verified_share_hashes, net):
    self.factory = factory
    self.bitcoind = bitcoind
    self.net = net
    
    self.cur_share_ver = None
    self.known_txs_var = variable.VariableDict({}) # hash -> tx
    self.mining_txs_var = variable.Variable({}) # hash -> tx
    self.mining2_txs_var = variable.Variable({}) # hash -> tx
    self.best_share_var = variable.Variable(None)
    self.desired_var = variable.Variable(None)
    self.txidcache = {}
    self.feecache = {}
    self.feefifo = []
    self.punish = False
    
    self.tracker = p2pool_data.OkayTracker(self.net)
    
    for share in shares:
        self.tracker.add(share)
    
    for share_hash in known_verified_share_hashes:
        if share_hash in self.tracker.items:
            self.tracker.verified.add(self.tracker.items[share_hash])
    
    self.p2p_node = None # overwritten externally
def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, known_txs_var=variable.Variable({}), mining_txs_var=variable.Variable({}), advertise_ip=True, external_ip=None):
    self.best_share_hash_func = best_share_hash_func
    self.port = port
    self.net = net
    self.addr_store = dict(addr_store)
    self.connect_addrs = connect_addrs
    self.preferred_storage = preferred_storage
    self.known_txs_var = known_txs_var
    self.mining_txs_var = mining_txs_var
    self.advertise_ip = advertise_ip
    self.external_ip = external_ip
    
    self.traffic_happened = variable.Event()
    self.nonce = random.randrange(2**64)
    self.peers = {}
    self.bans = {} # address -> end_time
    self.clientfactory = ClientFactory(self, desired_outgoing_conns, max_outgoing_attempts)
    self.serverfactory = ServerFactory(self, max_incoming_conns)
    self.running = False
def __init__(self, net):
    self.net = net
    self.conn = variable.Variable(None)
    
    self.new_block = variable.Event()
    self.new_tx = variable.Event()
    self.new_headers = variable.Event()
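# Minimal usage sketch (illustrative, not taken from these sources) of the
# Variable/Event pattern the factory above exposes: consumers read .value,
# subscribe with .watch()/.changed.watch(), and producers call .set() or
# .happened(). The import path assumes p2pool's p2pool.util.variable module.
from p2pool.util import variable

conn_var = variable.Variable(None)  # current connection, or None while disconnected
new_block_event = variable.Event()  # fired with a block hash when a block is seen

@new_block_event.watch
def _(block_hash):
    print 'new block %064x' % (block_hash,)

def on_conn_changed(conn):
    print 'connection is now %r' % (conn,)
conn_var.changed.watch(on_conn_changed)

conn_var.set('fake-connection')   # fires on_conn_changed
new_block_event.happened(0x1234)  # fires the watcher registered above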
def __init__(self):
    self.blocks = [0x00000000000132b9afeca5e9a2fdf4477338df6dcff1342300240bc70397c4bb]
    self.headers = {0x132b9afeca5e9a2fdf4477338df6dcff1342300240bc70397c4bb: {
        'nonce': 2093330011,
        'timestamp': 1452288263,
        'merkle_root': 0x43dda83285fae26de9a97331f617392261c77495debe97b7e18b1faf38d1ef8,
        'version': 3,
        'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
        'bits': bitcoin_data.FloatingInteger(bits=0x1b0d642e, target=0x44b9f20000000000000000000000000000000000000000000000L),
    }}
    
    self.conn = variable.Variable(self)
    self.new_headers = variable.Event()
    self.new_block = variable.Event()
    self.new_tx = variable.Event()
def get_height_rel_highest_func(bitcoind, factory, best_block_func, net):
    if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
        @deferral.DeferredCacher
        @defer.inlineCallbacks
        def height_cacher(block_hash):
            try:
                x = yield bitcoind.rpc_getblock('%x' % (block_hash,))
            except jsonrpc.Error_for_code(-5): # Block not found
                if not p2pool.DEBUG:
                    raise deferral.RetrySilentlyException()
                else:
                    raise
            defer.returnValue(x['blockcount'] if 'blockcount' in x else x['height'])
        best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(best_block_func())))
        def get_height_rel_highest(block_hash):
            this_height = height_cacher.call_now(block_hash, 0)
            best_height = height_cacher.call_now(best_block_func(), 0)
            best_height_cached.set(max(best_height_cached.value, this_height, best_height))
            return this_height - best_height_cached.value
    else:
        get_height_rel_highest = HeightTracker(best_block_func, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
    defer.returnValue(get_height_rel_highest)
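# Illustrative sketch (not from the original sources): the deferred above resolves
# to a closure, and get_height_rel_highest(block_hash) returns that block's height
# minus the highest height seen so far, so 0 means "at the tip" and negative values
# mean the block is that many blocks behind it. The wrapper function below is
# hypothetical; parameter names mirror the ones above.
from twisted.internet import defer

@defer.inlineCallbacks
def print_depth(bitcoind, factory, best_block_func, net, block_hash):
    get_height_rel_highest = yield get_height_rel_highest_func(
        bitcoind, factory, best_block_func, net)
    rel = get_height_rel_highest(block_hash)
    print 'block %064x is %i blocks below the best known block' % (block_hash, -rel)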
def test_nodes(self):
    N = 3
    SHARES = 600
    
    bitd = bitcoind()
    
    nodes = []
    for i in xrange(N):
        nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))
    
    yield deferral.sleep(3)
    
    for i in xrange(SHARES):
        proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port),
            headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
        blah = yield proxy.rpc_getwork()
        yield proxy.rpc_getwork(blah['data'])
        yield deferral.sleep(.05)
        print i
        print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])
    
    # crawl web pages
    from p2pool import web
    stop_event = variable.Event()
    web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
    web2_port = reactor.listenTCP(0, server.Site(web2_root))
    for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
        if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
        print
        print name
        try:
            res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
        except:
            import traceback
            traceback.print_exc()
        else:
            print repr(res)[:100]
        print
    yield web2_port.stopListening()
    stop_event.happened()
    del web2_root
    
    yield deferral.sleep(3)
    
    for i, n in enumerate(nodes):
        assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
        assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
        assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
        assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share
    
    for n in nodes:
        yield n.stop()
    
    del nodes, n
    import gc
    gc.collect()
    gc.collect()
    gc.collect()
    
    yield deferral.sleep(20) # waiting for work_poller to exit
def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event()):
    self._message_prefix = message_prefix
    self._max_payload_length = max_payload_length
    self.dataReceived2 = datachunker.DataChunker(self.dataReceiver())
    self.paused_var = variable.Variable(False)
    self.traffic_happened = traffic_happened
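# Illustrative sketch (assumed payload): traffic_happened is a shared Event that
# protocol instances fire as data moves; a watcher can aggregate whatever
# arguments it is fired with. The ('p2p/in', byte_count) payload below is an
# assumption for demonstration, not taken from the excerpt above.
from p2pool.util import variable

traffic_happened = variable.Event()
totals = {}

@traffic_happened.watch
def _(kind, num_bytes):
    totals[kind] = totals.get(kind, 0) + num_bytes

traffic_happened.happened('p2p/in', 512)  # e.g. fired after a message is received
print totals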
class factory(object):
    new_headers = variable.Event()
    new_block = variable.Event()
    new_tx = variable.Event()
    conn = variable.Variable(bitcoinp2p)
    @classmethod
    def getProtocol(self):
        return bitcoinp2p
def getwork(self, request, long_poll=False):
    request_id = get_id(request)
    memory = get_memory(request)
    
    id = random.randrange(10000)
    if p2pool.DEBUG:
        print 'POLL %i START long_poll=%r user_agent=%r x-work-identifier=%r user=%r' % (id, long_poll, request.getHeader('User-Agent'), request.getHeader('X-Work-Identifier'), get_username(request))
    
    if request_id not in self.worker_views:
        self.worker_views[request_id] = variable.Variable((0, (None, None))) # times, (previous_block/-1, previous_block/-2)
    
    thought_times, thought_work = self.worker_views[request_id].value
    
    if long_poll and thought_times == self.new_work_event.times:
        if p2pool.DEBUG:
            print 'POLL %i WAITING user=%r' % (id, get_username(request))
        yield defer.DeferredList([self.new_work_event.get_deferred(), self.worker_views[request_id].changed.get_deferred()], fireOnOneCallback=True)
    
    yield self.holds.wait_hold(request_id)
    
    res, identifier = self.compute(request)
    
    if thought_work[-1] is not None and self.new_work_event.times != thought_times and any(x is None or res.previous_block == x for x in thought_work[-memory or len(thought_work):]):
        # clients won't believe the update
        res = res.update(previous_block=random.randrange(2**256))
        if p2pool.DEBUG:
            print 'POLL %i FAKED user=%r' % (id, get_username(request))
        self.holds.set_hold(request_id, .01)
    
    self.worker_views[request_id].set((self.new_work_event.times if long_poll else thought_times, (thought_work[-1], res.previous_block)))
    
    if p2pool.DEBUG:
        print 'POLL %i END %s user=%r' % (id, p2pool_data.format_hash(identifier), get_username(request)) # XXX identifier is hack
    
    defer.returnValue(res.getwork(identifier=str(identifier)))
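# Condensed sketch (illustrative only) of the long-poll wait used above: block
# until either global new work arrives or this worker's own view changes,
# whichever fires first. new_work_event and worker_view stand in for
# self.new_work_event and self.worker_views[request_id].
from twisted.internet import defer

@defer.inlineCallbacks
def wait_for_update(new_work_event, worker_view):
    yield defer.DeferredList([
        new_work_event.get_deferred(),       # fires on the next new_work_event.happened()
        worker_view.changed.get_deferred(),  # fires on the next worker_view.set()
    ], fireOnOneCallback=True)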
def start(self):
    stop_signal = variable.Event()
    self.stop = stop_signal.happened
    
    # DASHD WORK
    
    self.dashd_work = variable.Variable((yield helper.getwork(self.dashd, self.net)))
    @defer.inlineCallbacks
    def work_poller():
        while stop_signal.times == 0:
            flag = self.factory.new_block.get_deferred()
            try:
                self.dashd_work.set((yield helper.getwork(self.dashd, self.net, self.dashd_work.value['use_getblocktemplate'])))
            except:
                log.err()
            yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
    work_poller()
    
    # PEER WORK
    
    self.best_block_header = variable.Variable(None)
    def handle_header(new_header):
        # check that header matches current target
        if not (self.net.PARENT.POW_FUNC(dash_data.block_header_type.pack(new_header)) <= self.dashd_work.value['bits'].target):
            return
        dashd_best_block = self.dashd_work.value['previous_block']
        if (self.best_block_header.value is None
            or (
                new_header['previous_block'] == dashd_best_block and
                self.net.PARENT.BLOCKHASH_FUNC(dash_data.block_header_type.pack(self.best_block_header.value)) == dashd_best_block
            ) # new is child of current and previous is current
            or (
                self.net.PARENT.BLOCKHASH_FUNC(dash_data.block_header_type.pack(new_header)) == dashd_best_block and
                self.best_block_header.value['previous_block'] != dashd_best_block
            )): # new is current and previous is not a child of current
            self.best_block_header.set(new_header)
    self.handle_header = handle_header
    
    @defer.inlineCallbacks
    def poll_header():
        if self.factory.conn.value is None:
            return
        handle_header((yield self.factory.conn.value.get_block_header(self.dashd_work.value['previous_block'])))
    self.dashd_work.changed.watch(lambda _: poll_header())
    yield deferral.retry('Error while requesting best block header:')(poll_header)()
    
    # BEST SHARE
    
    self.known_txs_var = variable.VariableDict({}) # hash -> tx
    self.mining_txs_var = variable.Variable({}) # hash -> tx
    self.get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(self.dashd, self.factory, lambda: self.dashd_work.value['previous_block'], self.net)
    
    self.best_share_var = variable.Variable(None)
    self.desired_var = variable.Variable(None)
    self.dashd_work.changed.watch(lambda _: self.set_best_share())
    self.set_best_share()
    
    # setup p2p logic and join p2pool network
    
    # update mining_txs according to getwork results
    @self.dashd_work.changed.run_and_watch
    def _(_=None):
        new_mining_txs = {}
        added_known_txs = {}
        for tx_hash, tx in zip(self.dashd_work.value['transaction_hashes'], self.dashd_work.value['transactions']):
            new_mining_txs[tx_hash] = tx
            added_known_txs[tx_hash] = tx
        self.mining_txs_var.set(new_mining_txs)
        self.known_txs_var.add(added_known_txs)
    
    # add p2p transactions from dashd to known_txs
    @self.factory.new_tx.watch
    def _(tx):
        self.known_txs_var.add({
            dash_data.hash256(dash_data.tx_type.pack(tx)): tx,
        })
    
    # forward transactions seen to dashd
    @self.known_txs_var.transitioned.watch
    @defer.inlineCallbacks
    def _(before, after):
        yield deferral.sleep(random.expovariate(1/1))
        if self.factory.conn.value is None:
            return
        for tx_hash in set(after) - set(before):
            self.factory.conn.value.send_tx(tx=after[tx_hash])
    
    @self.tracker.verified.added.watch
    def _(share):
        if not (share.pow_hash <= share.header['bits'].target):
            return
        
        block = share.as_block(self.tracker, self.known_txs_var.value)
        if block is None:
            print >> sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s dash: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            return
        helper.submit_block(block, True, self.factory, self.dashd, self.dashd_work, self.net)
        print
        print 'GOT BLOCK FROM PEER! Passing to dashd! %s dash: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
        print
        self.factory.new_block.happened(share.hash)
    
    def forget_old_txs():
        new_known_txs = {}
        if self.p2p_node is not None:
            for peer in self.p2p_node.peers.itervalues():
                new_known_txs.update(peer.remembered_txs)
        new_known_txs.update(self.mining_txs_var.value)
        for share in self.tracker.get_chain(self.best_share_var.value, min(120, self.tracker.get_height(self.best_share_var.value))):
            for tx_hash in share.new_transaction_hashes:
                if tx_hash in self.known_txs_var.value:
                    new_known_txs[tx_hash] = self.known_txs_var.value[tx_hash]
        self.known_txs_var.set(new_known_txs)
    t = deferral.RobustLoopingCall(forget_old_txs)
    t.start(10)
    stop_signal.watch(t.stop)
    
    t = deferral.RobustLoopingCall(self.clean_tracker)
    t.start(5)
    stop_signal.watch(t.stop)
        'timestamp': 1351658517,
        'merkle_root': 2282849479936278423916707524932131168473430114569971665822757638339486597658L,
        'version': 1,
        'previous_block': 1048610514577342396345362905164852351970507722694242579238530L,
        'bits': bitcoin_data.FloatingInteger(bits=0x1a0513c5, target=0x513c50000000000000000000000000000000000000000000000L),
    }}
    
    self.conn = variable.Variable(self)
    self.new_headers = variable.Event()
    self.new_block = variable.Event()
    self.new_tx = variable.Event()

# p2p factory

def getProtocol(self):
    return self

# p2p protocol

def send_block(self, block):
    pass

def send_tx(self, tx):
    pass
def start(self):
    stop_signal = variable.Event()
    self.stop = stop_signal.happened
    
    # BITCOIND WORK
    
    self.bitcoind_work = variable.Variable((yield helper.getwork(self.bitcoind)))
    @defer.inlineCallbacks
    def work_poller():
        while stop_signal.times == 0:
            flag = self.factory.new_block.get_deferred()
            try:
                self.bitcoind_work.set((yield helper.getwork(self.bitcoind, self.bitcoind_work.value['use_getblocktemplate'])))
            except:
                log.err()
            yield defer.DeferredList([flag, deferral.sleep(15)], fireOnOneCallback=True)
    work_poller()
    
    # PEER WORK
    
    self.best_block_header = variable.Variable(None)
    def handle_header(new_header):
        # check that header matches current target
        if not (self.net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(new_header)) <= self.bitcoind_work.value['bits'].target):
            return
        bitcoind_best_block = self.bitcoind_work.value['previous_block']
        if (self.best_block_header.value is None
            or (
                new_header['previous_block'] == bitcoind_best_block and
                bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.best_block_header.value)) == bitcoind_best_block
            ) # new is child of current and previous is current
            or (
                bitcoin_data.hash256(bitcoin_data.block_header_type.pack(new_header)) == bitcoind_best_block and
                self.best_block_header.value['previous_block'] != bitcoind_best_block
            )): # new is current and previous is not a child of current
            self.best_block_header.set(new_header)
    self.handle_header = handle_header
    
    @defer.inlineCallbacks
    def poll_header():
        if self.factory.conn.value is None:
            return
        handle_header((yield self.factory.conn.value.get_block_header(self.bitcoind_work.value['previous_block'])))
    self.bitcoind_work.changed.watch(lambda _: poll_header())
    yield deferral.retry('Error while requesting best block header:')(poll_header)()
    
    # BEST SHARE
    
    self.known_txs_var = variable.Variable({}) # hash -> tx
    self.mining_txs_var = variable.Variable({}) # hash -> tx
    self.get_height_rel_highest = yield height_tracker.get_height_rel_highest_func(self.bitcoind, self.factory, lambda: self.bitcoind_work.value['previous_block'], self.net)
    
    self.best_share_var = variable.Variable(None)
    self.desired_var = variable.Variable(None)
    self.bitcoind_work.changed.watch(lambda _: self.set_best_share())
    self.set_best_share()
    
    # setup p2p logic and join p2pool network
    
    # update mining_txs according to getwork results
    @self.bitcoind_work.changed.run_and_watch
    def _(_=None):
        new_mining_txs = {}
        new_known_txs = dict(self.known_txs_var.value)
        for tx_hash, tx in zip(self.bitcoind_work.value['transaction_hashes'], self.bitcoind_work.value['transactions']):
            new_mining_txs[tx_hash] = tx
            new_known_txs[tx_hash] = tx
        self.mining_txs_var.set(new_mining_txs)
        self.known_txs_var.set(new_known_txs)
    
    # add p2p transactions from bitcoind to known_txs
    @self.factory.new_tx.watch
    def _(tx):
        new_known_txs = dict(self.known_txs_var.value)
        new_known_txs[bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))] = tx
        self.known_txs_var.set(new_known_txs)
    
    # forward transactions seen to bitcoind
    @self.known_txs_var.transitioned.watch
    @defer.inlineCallbacks
    def _(before, after):
        yield deferral.sleep(random.expovariate(1/1))
        if self.factory.conn.value is None:
            return
        for tx_hash in set(after) - set(before):
            self.factory.conn.value.send_tx(tx=after[tx_hash])
    
    @self.tracker.verified.added.watch
    def _(share):
        if not (share.pow_hash <= share.header['bits'].target):
            return
        
        block = share.as_block(self.tracker, self.known_txs_var.value)
        if block is None:
            print >> sys.stderr, 'GOT INCOMPLETE BLOCK FROM PEER! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
            return
        helper.submit_block(block, True, self.factory, self.bitcoind, self.bitcoind_work, self.net)
        print
        print 'GOT BLOCK FROM PEER! Passing to bitcoind! %s bitcoin: %s%064x' % (p2pool_data.format_hash(share.hash), self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash)
        print
        
        # Send a Pushover notification when a block is found by a peer (ikolubr - Mar 31st, 2018)
        if self.net.USE_PUSHOVER_BLOCK:
            conn = httplib.HTTPSConnection("api.pushover.net:443")
            conn.request("POST", "/1/messages.json",
                urllib.urlencode({
                    "token": self.net.PUSHOVER_APP_TOKEN,
                    "user": self.net.PUSHOVER_USER_KEY,
                    "message": 'FOUND BLOCK! %s%064x' % (self.net.PARENT.BLOCK_EXPLORER_URL_PREFIX, share.header_hash),
                }),
                {"Content-type": "application/x-www-form-urlencoded"})
            conn.getresponse()
    
    def forget_old_txs():
        new_known_txs = {}
        if self.p2p_node is not None:
            for peer in self.p2p_node.peers.itervalues():
                new_known_txs.update(peer.remembered_txs)
        new_known_txs.update(self.mining_txs_var.value)
        for share in self.tracker.get_chain(self.best_share_var.value, min(120, self.tracker.get_height(self.best_share_var.value))):
            for tx_hash in share.new_transaction_hashes:
                if tx_hash in self.known_txs_var.value:
                    new_known_txs[tx_hash] = self.known_txs_var.value[tx_hash]
        self.known_txs_var.set(new_known_txs)
    t = deferral.RobustLoopingCall(forget_old_txs)
    t.start(10)
    stop_signal.watch(t.stop)
    
    t = deferral.RobustLoopingCall(self.clean_tracker)
    t.start(5)
    stop_signal.watch(t.stop)
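# Illustrative sketch (not from the original sources) of the shutdown pattern
# used at the end of start(): each periodic maintenance loop is tied to a stop
# Event, so firing the Event (exposed above as self.stop = stop_signal.happened)
# cancels them all.
from p2pool.util import variable, deferral

stop_signal = variable.Event()

def maintenance():
    pass  # stands in for forget_old_txs / clean_tracker above

t = deferral.RobustLoopingCall(maintenance)
t.start(10)                # run every 10 seconds
stop_signal.watch(t.stop)  # the loop's stop() runs when the signal fires

# later, to shut everything down:
stop_signal.happened()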