@defer.inlineCallbacks
def test_nodes(self):
    N = 3
    SHARES = 600

    bitd = mued()

    # spin up N miniature p2pool nodes, each peered with all previously started ones
    nodes = []
    for i in xrange(N):
        nodes.append((yield MiniNode.start(mynet, bitd, bitd, [mn.n.p2p_node.serverfactory.listen_port.getHost().port for mn in nodes], [])))

    yield deferral.sleep(3)

    # submit SHARES pieces of work through the getwork interface of randomly chosen nodes
    for i in xrange(SHARES):
        proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(random.choice(nodes).web_port.getHost().port),
            headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))
        blah = yield proxy.rpc_getwork()
        yield proxy.rpc_getwork(blah['data'])
        yield deferral.sleep(.05)
        print i
        print type(nodes[0].n.tracker.items[nodes[0].n.best_share_var.value])

    # crawl web pages
    from p2pool import web
    stop_event = variable.Event()
    web2_root = web.get_web_root(nodes[0].wb, tempfile.mkdtemp(), variable.Variable(None), stop_event)
    web2_port = reactor.listenTCP(0, server.Site(web2_root))
    for name in web2_root.listNames() + ['web/' + x for x in web2_root.getChildWithDefault('web', None).listNames()]:
        if name in ['web/graph_data', 'web/share', 'web/share_data']: continue
        print
        print name
        try:
            res = yield client.getPage('http://127.0.0.1:%i/%s' % (web2_port.getHost().port, name))
        except:
            import traceback
            traceback.print_exc()
        else:
            print repr(res)[:100]
        print
    yield web2_port.stopListening()
    stop_event.happened()
    del web2_root

    yield deferral.sleep(3)

    # every node should have received and verified every share
    for i, n in enumerate(nodes):
        assert len(n.n.tracker.items) == SHARES, (i, len(n.n.tracker.items))
        assert n.n.tracker.verified.get_height(n.n.best_share_var.value) == SHARES, (i, n.n.tracker.verified.get_height(n.n.best_share_var.value))
        assert type(n.n.tracker.items[nodes[0].n.best_share_var.value]) is (data.Share.SUCCESSOR if data.Share.SUCCESSOR is not None else data.Share)
        assert type(n.n.tracker.items[n.n.tracker.get_nth_parent_hash(nodes[0].n.best_share_var.value, SHARES - 5)]) is data.Share

    for n in nodes:
        yield n.stop()

    del nodes, n
    import gc
    gc.collect()
    gc.collect()
    gc.collect()

    yield deferral.sleep(20) # waiting for work_poller to exit
@defer.inlineCallbacks
def test_node(self):
    bitd = dcrd()

    # stand up a fake merged-mining daemon for the WorkerBridge to poll
    mm_root = resource.Resource()
    mm_root.putChild('', jsonrpc.HTTPServer(mm_provider))
    mm_port = reactor.listenTCP(0, server.Site(mm_root))

    n = node.Node(bitd, bitd, [], [], mynet)
    yield n.start()

    wb = work.WorkerBridge(node=n, my_pubkey_hash=42, donation_percentage=2,
        merged_urls=[('http://127.0.0.1:%i' % (mm_port.getHost().port,), '')], worker_fee=3,
        args=math.Object(donation_percentage=2, address='foo', worker_fee=3, timeaddresses=1000),
        pubkeys=main.keypool(), dcrd=bitd)
    web_root = resource.Resource()
    worker_interface.WorkerInterface(wb).attach_to(web_root)
    port = reactor.listenTCP(0, server.Site(web_root))

    proxy = jsonrpc.HTTPProxy('http://127.0.0.1:' + str(port.getHost().port),
        headers=dict(Authorization='Basic ' + base64.b64encode('user/0:password')))

    yield deferral.sleep(3)

    # request and submit 100 pieces of work through the getwork interface
    for i in xrange(100):
        blah = yield proxy.rpc_getwork()
        yield proxy.rpc_getwork(blah['data'])

    yield deferral.sleep(3)

    assert len(n.tracker.items) == 100
    assert n.tracker.verified.get_height(n.best_share_var.value) == 100

    wb.stop()
    n.stop()

    yield port.stopListening()
    del n, wb, web_root, port, proxy
    import gc
    gc.collect()
    gc.collect()
    gc.collect()

    yield deferral.sleep(20) # waiting for work_poller to exit

    yield mm_port.stopListening()
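# The two generator tests above take ``self`` and drive Twisted Deferreds with
# ``yield``, so they presumably sit inside a twisted.trial.unittest.TestCase
# subclass that is not shown in this section. Assuming the usual p2pool layout
# (p2pool/test/test_node.py), they would typically be run with Twisted's trial
# runner, e.g.:
#
#     trial p2pool.test.test_node
#
# (the module path is an assumption for illustration, not taken from this section)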
@defer.inlineCallbacks
def main():
    datadir = sys.argv[1]
    b = jsonrpc.HTTPProxy(sys.argv[2], dict(
        Authorization='Basic ' + base64.b64encode(
            sys.argv[3] + ':' + sys.argv[4]
        ),
    ), timeout=30)
    p2pool_base_urls = sys.argv[5:]

    @defer.inlineCallbacks
    def get(blah):
        # try the configured p2pool web servers in random order until one answers
        for p2pool_base_url in util_math.shuffled(p2pool_base_urls):
            url = p2pool_base_url.rstrip('/') + '/' + blah
            print 'trying', url
            try:
                d = yield client.getPage(url)
            except Exception:
                traceback.print_exc()
            else:
                defer.returnValue(json.loads(d))
        raise ValueError('no good p2pool servers')

    # read old
    old_blocks = json.loads(_atomic_read(
        os.path.join(datadir, 'blocks'),
        '[]',
    ))
    old_stats = json.loads(_atomic_read(
        os.path.join(datadir, 'stats'),
        '{"rates": [], "maxRate": 0, "users": [], "maxUsers": 0}',
    ))

    # update

    #print stats
    web_local_stats = yield get('local_stats')
    web_global_stats = yield get('global_stats')
    web_users = yield get('users')
    web_current_payouts = yield get('current_payouts')

    difficulty = bitcoin_data.target_to_difficulty(
        bitcoin_data.average_attempts_to_target(web_local_stats['attempts_to_block']))

    users = [dict(
        Hashrate=util_math.format(int(frac*web_global_stats['pool_hash_rate'] + 0.5), add_space=True) + 'H/s', # round to nearest
        Address=addr,
    ) for addr, frac in sorted(web_users.iteritems(), key=lambda (k, v): -v)]

    payouts = [dict(
        Address=addr,
        Payment=amt,
    ) for addr, amt in sorted(web_current_payouts.iteritems(), key=lambda (k, v): -v)]

    def update_timeseries(x, value, now_time):
        # either fold the new sample into the most recent bucket or, if now_time is
        # closer to the next expected sample time, start a new bucket
        lastlast_time = x[-2][0]
        last_time = x[-1][0]
        next_time = last_time + (last_time - lastlast_time)
        if abs(now_time - last_time) < abs(now_time - next_time):
            # update last
            old_value = x[-1][1]
            old_weight = x[-1][2] if len(x[-1]) >= 3 else 1e9
            return x[:-1] + [[last_time, (old_value*old_weight + value)/(old_weight + 1), old_weight + 1]]
        else:
            # start next
            return x + [[next_time, value, 1]]

    stats = dict(
        rates=update_timeseries(old_stats['rates'], web_global_stats['pool_hash_rate']/1e9, time.time()*1e3),
        maxRate=max(old_stats['maxRate'], web_global_stats['pool_hash_rate']/1e9),
        users=update_timeseries(old_stats['users'], len(web_users), time.time()*1e3),
        maxUsers=max(old_stats['maxUsers'], len(web_users)),
    )

    blocks = list(old_blocks)
    blocks_dict = dict((block['Id'], block) for block in blocks)
    assert len(blocks_dict) == len(blocks)

    def handle_block(block_data):
        # only record blocks whose generation transaction looks p2pool-generated:
        # many outputs, the donation output second to last, and a zero-value
        # OP_RETURN output carrying the share hash last
        block = block_data['block']
        txouts = block['txs'][0]['tx_outs']
        if len(txouts) < 25: return
        if not txouts[-1]['script'].startswith('\x6a'): return
        if len(txouts[-1]['script']) < 33: return
        if txouts[-1]['value'] != 0: return
        if txouts[-2]['script'] != p2pool_data.DONATION_SCRIPT: return
        hash_str = '%064x' % bitcoin_data.hash256(bitcoin_data.block_header_type.pack(block['header']))
        print hash_str
        if hash_str not in blocks_dict:
            print 'inserted'
            x = dict(
                Id=hash_str,
                PrevBlock='%064x' % block['header']['previous_block'],
                GenerationTxHash='%064x' % block_data['gentx_hash'],
                BlockHeight=block_data['height'],
                Difficulty=bitcoin_data.target_to_difficulty(block['header']['bits'].target),
                Timestamp=block['header']['timestamp'],
                IsOrphaned=None, # XXX
            )
            blocks.append(x)
            blocks_dict[hash_str] = x
    yield get_blocks(b, 400, handle_block)

    blocks.sort(key=lambda x: -x['Timestamp'])

    # write
    _atomic_write(os.path.join(datadir, 'blocks_5'), json.dumps(blocks[:5]))
    _atomic_write(os.path.join(datadir, 'blocks_100'), json.dumps(blocks[:100]))
    _atomic_write(os.path.join(datadir, 'blocks'), json.dumps(blocks))
    _atomic_write(os.path.join(datadir, 'difficulty'), json.dumps(difficulty))
    #_atomic_write(os.path.join(datadir, 'donations'), json.dumps(donations))
    _atomic_write(os.path.join(datadir, 'payouts'), json.dumps(payouts))
    _atomic_write(os.path.join(datadir, 'stats'), json.dumps(stats))
    _atomic_write(os.path.join(datadir, 'users'), json.dumps(users))
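
# --- hypothetical entry point (not in the original section; illustrative only) ---
# main() above is an inlineCallbacks-style coroutine, so something must start the
# Twisted reactor and stop it once the returned Deferred fires. Assuming
# ``from twisted.internet import reactor`` is available at module level, a
# minimal driver could look like:
#
#     if __name__ == '__main__':
#         d = main()
#         d.addErrback(lambda f: f.printTraceback())
#         d.addBoth(lambda _: reactor.stop())
#         reactor.run()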