def ba_gen(arguments):
    """Generate a synthetic graph (Barabasi-Albert or degree-sequence based),
    name its vertices, annotate each with closeness, and save it.

    Reads ``arguments.input_graph``; writes to ``arguments.ba_output``.
    """
    source_graph = helpers.load_network(arguments.input_graph)
    if arguments.use_degree_seq:
        method, generated = 'Degree Seq.', _degree_graph(source_graph)
    else:
        method, generated = 'Barabasi-Albert', _ba_graph(source_graph)
    logger.info('Used method: [b]%s[/]' % method)
    logger.debug('Graph created. Fill up closeness values')
    vertex_count = generated.vcount()
    # Animated bar only on request; the dummy bar is a silent no-op.
    if arguments.progressbar:
        progress = progressbar1.AnimatedProgressBar(end=vertex_count, width=15)
    else:
        progress = progressbar1.DummyProgressBar(end=10, width=15)
    for vertex in generated.vs:
        progress += 1
        progress.show_progress()
        vertex['name'] = 'BA%s' % vertex.index
        vertex['closeness'] = generated.closeness(vertex, mode=igraph.OUT)
    logger.info('Save to %s' % arguments.ba_output)
    generated.save(arguments.ba_output)
def sh_gen(arguments):
    """Sample random node pairs and save their shortest-path hop counts.

    Disconnected pairs are resampled until a connected pair is found.
    Output: JSON list of ``[(source_name, target_name), hop_count]``.
    """
    ba_graph = helpers.load_network(arguments.ba_graph)
    trace_count = int(arguments.route_count)
    node_ids = range(ba_graph.vcount())
    logger.info('Trace count: %d' % trace_count)
    random_pairs = [random.sample(node_ids, 2) for _ in xrange(trace_count)]
    random_pairs = [(ba_graph.vs[pair[0]]['name'], ba_graph.vs[pair[1]]['name'])
                    for pair in random_pairs]
    logger.info('Random pair count: %d' % len(random_pairs))
    results = []
    for s_name, t_name in random_pairs:
        # +1 turns edge count into node count (hop length).
        shl = ba_graph.shortest_paths(s_name, t_name)[0][0] + 1
        while shl == float('inf'):
            # No path between the endpoints: draw a fresh pair.
            s, t = random.sample(node_ids, 2)
            s_name = ba_graph.vs[s]['name']
            t_name = ba_graph.vs[t]['name']
            shl = ba_graph.shortest_paths(s_name, t_name)[0][0] + 1
        logger.debug('From %s to %s SH: %d' % (s_name, t_name, shl))
        results.append([(s_name, t_name), shl])
    helpers.save_to_json(arguments.sh_path_output, results)
def main():
    """Compute per-vertex closeness for a network and save the annotated graph.

    CLI: NETWORK OUT [--progressbar] [--verbose ...]
    """
    parser = argparse.ArgumentParser(
        description='Save closeness values for all node',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('out')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    logger.info('Graph loaded from: %s' % arguments.network)
    logger.info('Graph vertex count: %d' % g.vcount())
    # FIX: "'closeness' not in g.vs" compared the string against Vertex
    # objects and was always True, so cached values were never reused.
    # Test the attribute list instead.
    if 'closeness' not in g.vs.attributes():
        logger.info('Calculate closeness values')
        progress = progressbar1.DummyProgressBar(end=10, width=15)
        if arguments.progressbar:
            progress = progressbar1.AnimatedProgressBar(end=g.vcount(),
                                                        width=15)
        for n in g.vs:
            progress += 1
            progress.show_progress()
            n['closeness'] = g.closeness(n, mode=igraph.OUT)
    g.save(arguments.out)
def ba_calc(arguments):
    """Run ba_generator for every stretch in [min_stretch, max_stretch]
    over a slice of the shortest-path pairs, and save all results as JSON.

    The lb/ub bounds (for parallelization) are clamped to the valid range
    and swapped if reversed.
    """
    ba_graph = helpers.load_network(arguments.ba_graph)
    sh_paths = helpers.load_from_json(arguments.point_pairs)
    total = len(sh_paths)
    # Out-of-range bounds fall back to the full range; reversed bounds swap.
    if not 0 <= arguments.lb <= total:
        arguments.lb = 0
    if not 0 <= arguments.ub <= total:
        arguments.ub = total
    if arguments.lb > arguments.ub:
        arguments.lb, arguments.ub = arguments.ub, arguments.lb
    selected_paths = sh_paths[arguments.lb:arguments.ub]
    vf_graph = vft.convert_to_vf(ba_graph, vfmode=vft.CLOSENESS)
    results = []
    for stretch in xrange(arguments.min_stretch, arguments.max_stretch + 1):
        logger.info('Calculate results with stretch %d' % stretch)
        results.append(ba_generator(ba_graph, selected_paths, stretch,
                                    vf_graph, arguments.progressbar))
    helpers.save_to_json(arguments.out, results)
def main(): parser = argparse.ArgumentParser( description='Display statistical informations', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('network') parser.add_argument('metadata') parser.add_argument('--locations') parser.add_argument('out', type=str, help='Plot name pre') arguments = parser.parse_args() g = helpers.load_network(arguments.network) meta = helpers.load_from_json(arguments.metadata) if arguments.locations: locations = helpers.load_from_json(arguments.locations) else: locations = None f_name = arguments.out print print '------' print 'Graph: %s' % arguments.network print 'META: %s' % arguments.metadata print 'OUT: %s' % f_name print '------' print generate_db(g, meta, locations, f_name)
def main():
    """Calculate meta information for real traces and save the updated meta.

    Loads the network and trace metadata, restricts the metadata to the
    [lb, ub) slice (for parallelization), runs purify() with the selected
    feature flags (purify updates the meta entries in place), then writes
    the metadata back as JSON.
    """
    parser = argparse.ArgumentParser(
        description='Calculate meta information for real traces',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('output', type=argparse.FileType('w'))
    # for parallelization
    parser.add_argument('--lower-bound', '-lb', type=int, default=0, dest='lb')
    parser.add_argument('--upper-bound', '-ub', type=int, default=-1, dest='ub')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    parser.add_argument('--with-prelabeled', action='store_true')
    parser.add_argument('--with-closeness', action='store_true')
    parser.add_argument('--with-degree', action='store_true')
    parser.add_argument('--with-lp-hard', action='store_true')
    parser.add_argument('--with-lp-soft', action='store_true')
    parser.add_argument('--try-per-trace', type=int, default=1,
                        dest='try_per_trace')
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.meta)
    # Out-of-range bounds fall back to the full range.
    arguments.lb = arguments.lb if 0 <= arguments.lb <= len(meta) else 0
    arguments.ub = (arguments.ub
                    if 0 <= arguments.ub <= len(meta) else len(meta))
    flags = {
        FLAG_PRELABELED: arguments.with_prelabeled,
        FLAG_CLOSENESS: arguments.with_closeness,
        FLAG_DEGREE: arguments.with_degree,
        FLAG_LP_HARD: arguments.with_lp_hard,
        FLAG_LP_SOFT: arguments.with_lp_soft,
    }
    meta = meta[arguments.lb:arguments.ub]
    # purify() updates meta in place.
    purify(g, meta, flags, arguments.try_per_trace, arguments.progressbar)
    logger.info('Save to %s' % arguments.output)
    helpers.save_to_json(arguments.output, meta)
def main():
    """Synthetic route analysis entry point with two subcommands.

    ``trace-dir`` analyzes original route direction decisions and
    ``upwalker`` counts unnecessary up steps; both are dispatched through
    ``arguments.dispatch``. The metadata is pre-filtered to valley-free,
    non-trivial, non-loop traces and sliced to [lb, ub) for parallelization.
    """
    formatter = argparse.ArgumentDefaultsHelpFormatter
    parser = argparse.ArgumentParser(description=('Syntetic route generator'),
                                     formatter_class=formatter)
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    # for parallelization
    parser.add_argument('--lower-bound', '-lb', type=int, default=0, dest='lb')
    parser.add_argument('--upper-bound', '-ub', type=int, default=-1, dest='ub')
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('out')
    subparsers = parser.add_subparsers(help=('Sub commands to switch '
                                             'between different functions'))
    trace_dir_help = 'Analyze original route direction decisions'
    trace_dir_arg = subparsers.add_parser('trace-dir', help=trace_dir_help)
    trace_dir_arg.set_defaults(dispatch=trace_dir)
    upwalker_help = 'How many up step required unnecessary?'
    upwalker_arg = subparsers.add_parser('upwalker', help=upwalker_help)
    upwalker_arg.add_argument('--mode', default='count',
                              choices=['count', 'deepness'])
    upwalker_arg.set_defaults(dispatch=upwalker)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    g = g.simplify()
    meta = helpers.load_from_json(arguments.meta)
    # Keep only valley-free traces with at least two distinct endpoints.
    meta = [
        m for m in meta
        if m[helpers.IS_VF_CLOSENESS] == 1 and len(m[helpers.TRACE]) > 1
        and m[helpers.TRACE][0] != m[helpers.TRACE][-1]
    ]
    arguments.lb = arguments.lb if 0 <= arguments.lb <= len(meta) else 0
    arguments.ub = (arguments.ub
                    if 0 <= arguments.ub <= len(meta) else len(meta))
    meta = meta[arguments.lb:arguments.ub]
    vf_g = vft.convert_to_vf(g, vfmode=vft.CLOSENESS)
    arguments.dispatch(g, meta, vf_g, arguments)
def main():
    """SANDBOX: keep only the top-closeness nodes of the giant component,
    delete the rest, then hand the reduced graph to purify().

    Closeness is computed (and cached to ``with_closeness.gml``) only when
    the loaded graph has no 'closeness' vertex attribute.
    """
    parser = argparse.ArgumentParser(
        description='SANDBOX mode. Write something useful here',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('--sample-size', type=int, default=1000,
                        dest='sample_size')
    parser.add_argument('--node-drop', type=float, default=.1,
                        dest='node_drop')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    g = g.components().giant()
    logger.info('Graph loaded from: %s' % arguments.network)
    logger.info('Graph vertex count: %d' % g.vcount())
    end = int(g.vcount() * arguments.node_drop)
    logger.info('Remaining node count: %d' % end)
    try:
        nodes = [(x.index, x['closeness']) for x in g.vs]
    except KeyError:
        # Attribute missing: compute closeness now and cache it to disk.
        logger.info('Calculate closeness values')
        progress = progressbar1.DummyProgressBar(end=10, width=15)
        if arguments.progressbar:
            progress = progressbar1.AnimatedProgressBar(end=g.vcount(),
                                                        width=15)
        for n in g.vs:
            progress += 1
            progress.show_progress()
            n['closeness'] = g.closeness(n)
        g.save('with_closeness.gml')
        nodes = [(x.index, x['closeness']) for x in g.vs]
    nodes = sorted(nodes, reverse=True, key=lambda x: x[1])
    # FIX: use a set — membership tests against a list made the node
    # filtering below accidentally O(n^2).
    top_nodes = set(x[0] for x in nodes[:end])
    delete_nodes = [x.index for x in g.vs if x.index not in top_nodes]
    g.delete_vertices(delete_nodes)
    logger.info('Left nodes: %d' % g.vcount())
    purify(g, arguments.sample_size, arguments.progressbar)
def main():
    """SANDBOX: interactive comparison of original traces vs. shortest paths.

    For up to 100 random metadata entries whose trace length equals the
    shortest-path length, prints the closeness profile (max / sum) of the
    original trace and of every shortest path, then waits for keypress and
    exits. NOTE(review): the `raw_input()` + `exit(0)` inside the loop mean
    only the FIRST sampled entry is ever shown and `purify` below is dead
    code — presumably intentional debug scaffolding; confirm before reuse.
    """
    parser = argparse.ArgumentParser(
        description='SANDBOX mode. Write something useful here',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('out')
    parser.add_argument('--count', type=int, default=1000)
    arguments = parser.parse_args()
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.meta)
    out = arguments.out
    g.vs['closeness'] = g.closeness()
    # Keep only stretch-free entries (trace already a shortest path).
    k = [x for x in meta if x[helpers.SH_LEN] == x[helpers.TRACE_LEN]]
    random.shuffle(k)
    k = k[:100]
    for m in k:
        # Redundant guard: k was already filtered to equal lengths above.
        if m[helpers.SH_LEN] != m[helpers.TRACE_LEN]:
            continue
        trace = m[helpers.TRACE]
        # Closeness of every node on the original trace, by vertex name.
        o = [g.vs.find(x)['closeness'] for x in trace]
        print 'ORIGINAL TRACE: \n%s--%s: %s ' % (max(o), sum(o), o)
        l = []  # per-path maximum closeness
        b = []  # per-path closeness sum
        s, t = trace[0], trace[-1]
        sh = g.get_all_shortest_paths(s, t)
        print 'SH paths:'
        for p in sh:
            tr = [g.vs[x]['closeness'] for x in p]
            print '%s--%s' % (max(tr), sum(tr))
            l.append(max(tr))
            b.append(sum(tr))
        print 'AVG:\n %s' % (sum(l) / float(len(l)))
        l_sorted = sorted(l, reverse=True)
        b_sorted = sorted(b, reverse=True)
        # Mark where the original trace's values sit in the sorted lists.
        print 'SORTED MAX:'
        for x in l_sorted:
            if x == max(o):
                print '!!!'
            print x
        print 'SORTED SUM:'
        for x in b_sorted:
            if x == sum(o):
                print '!!!!'
            print x
        raw_input()
        exit(0)
    purify(g, meta, out, arguments.count)
def wrap_watts_trace_gen(args):
    """CLI wrapper around watts_trace_gen().

    Loads the graph and the original traceroutes, clamps the [lb, ub)
    slice bounds (for parallelization), generates the Watts traces and
    saves them as JSON.
    """
    graph = helpers.load_network(args.network)
    routes = helpers.load_from_json(args.original_traceroutes)
    total = len(routes)
    # Out-of-range bounds fall back to the full range; reversed bounds swap.
    if not 0 <= args.lb <= total:
        args.lb = 0
    if not 0 <= args.ub <= total:
        args.ub = total
    if args.lb > args.ub:
        args.lb, args.ub = args.ub, args.lb
    selected_routes = routes[args.lb:args.ub]
    generated = watts_trace_gen(graph, selected_routes, args.progressbar)
    helpers.save_to_json(args.traceroute_dest, generated)
def main():
    """SANDBOX: run purify() on a [lb, ub) slice of the trace metadata.

    Bounds are clamped to the metadata length and swapped if reversed,
    mirroring the other parallelizable entry points in this project.
    """
    parser = argparse.ArgumentParser(
        description='SANDBOX mode. Write something useful here',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('out')
    parser.add_argument('--route-count', type=int, default=1000,
                        dest='route_count')
    parser.add_argument('--try-per-trace', type=int, default=1,
                        dest='try_per_trace')
    parser.add_argument('--with-lp', action='store_true', dest='with_lp')
    # for parallelization
    parser.add_argument('--lower-bound', '-lb', type=int, default=0, dest='lb')
    parser.add_argument('--upper-bound', '-ub', type=int, default=-1, dest='ub')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.meta)
    out = arguments.out
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    max_c = len(meta)
    arguments.lb = arguments.lb if 0 <= arguments.lb <= max_c else 0
    arguments.ub = arguments.ub if 0 <= arguments.ub <= max_c else max_c
    arguments.lb, arguments.ub = (min(arguments.lb, arguments.ub),
                                  max(arguments.lb, arguments.ub))
    meta = meta[arguments.lb:arguments.ub]
    purify(g, meta, out, arguments.route_count, arguments.try_per_trace,
           arguments.progressbar, arguments.with_lp)
def main():
    """Filter traceroutes by the selected criteria and save the survivors.

    The ``--filter`` value is a '+'-joined combination of: sh, loop, ex,
    vf, lp. Any 'lp' implies 'vf+lp'.
    """
    parser = argparse.ArgumentParser(
        description=
        'Filter out non vf and non lp traceroutes from given traceroute list',
        parents=[
            argparse_general.commonParser,
        ],
        **argparse_general.commonParams)
    parser.add_argument('network')
    parser.add_argument('traceroutes')
    parser.add_argument(
        '--filter',
        default='sh+loop+ex+lp',
        help=
        'Possible values: sh (short), loop (AS number repetition), ex (non existent), vf (non valley free), lp (non local preferenced), or any combination with + sign. Note that lp automatically means vf+lp'
    )
    parser.add_argument('--lp-type', default='first',
                        choices=['first', 'all'], dest='first_edge')
    parser.add_argument('output')
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    # Repurpose first_edge as a boolean: True means only the first edge
    # is checked for local preference.
    arguments.first_edge = arguments.first_edge == 'first'
    if arguments.first_edge:
        logger.debug('LP only first edge')
    else:
        logger.debug('LP all edge')
    g = helpers.load_network(arguments.network)
    traceroutes = helpers.load_from_json(arguments.traceroutes)
    arguments.lb = (arguments.lb
                    if 0 <= arguments.lb <= len(traceroutes) else 0)
    arguments.ub = (arguments.ub if 0 <= arguments.ub <= len(traceroutes)
                    else len(traceroutes))
    # NOTE(review): substring replace — a user-supplied 'vf+lp' becomes
    # 'vf+vf+lp' (harmless duplicate filter) — confirm acceptable.
    arguments.filter = arguments.filter.replace('lp', 'vf+lp')
    filters = arguments.filter.split('+')
    # 'filter' here is the project-level filter function, which shadows
    # the builtin of the same name at module scope.
    result = filter(g, traceroutes[arguments.lb:arguments.ub], filters,
                    arguments.first_edge)
    helpers.save_to_json(arguments.output, result)
def main(): parser = argparse.ArgumentParser( description="Implementation of Barabasi's ide", parents=[ argparse_general.commonParser, ], **argparse_general.commonParams) parser.add_argument('network') parser.add_argument('meta') parser.add_argument('output', type=argparse.FileType('w')) arguments = parser.parse_args() arguments.verbose = min(len(helpers.LEVELS), arguments.verbose) logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose]) g = helpers.load_network(arguments.network) g = g.simplify() meta = helpers.load_from_json(arguments.meta) betweenness = g.edge_betweenness() maxb = max(betweenness) weights = [math.log(maxb / x) for x in betweenness] g.es['weight'] = weights for m in meta: trace = m[helpers.TRACE] trace_id = [g.vs.find(x).index for x in trace] s, t = trace_id[0], trace_id[-1] barabasi_path = random.choice( g.get_shortest_paths(s, t, weights='weight')) shortest_path = random.choice(g.get_shortest_paths(s, t)) bcost = barabasi_cost(g, barabasi_path) shcost = barabasi_cost(g, shortest_path) trcost = barabasi_cost(g, trace_id) shdelta = shcost - bcost trdelta = trcost - bcost delta = shdelta - trdelta print delta
def main():
    """SANDBOX: select the top closeness vertices and run purify() with them.

    Closeness is computed (and cached to '<network>_with_closeness.gml')
    only when the loaded graph lacks the 'closeness' vertex attribute.
    """
    parser = argparse.ArgumentParser(
        description='SANDBOX mode. Write something useful here',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('--top-node-ratio', type=float, default=.1,
                        dest='top_node_ratio')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.meta)
    logger.info('Graph loaded from: %s' % arguments.network)
    logger.info('Graph vertex count: %d' % g.vcount())
    end = int(g.vcount() * arguments.top_node_ratio)
    logger.info('Top node count: %d' % end)
    try:
        nodes = [(x.index, x['closeness']) for x in g.vs]
    except KeyError:
        # Attribute missing: compute closeness now and cache it to disk.
        logger.info('Calculate closeness values')
        progress = progressbar1.DummyProgressBar(end=10, width=15)
        if arguments.progressbar:
            progress = progressbar1.AnimatedProgressBar(end=g.vcount(),
                                                        width=15)
        for n in g.vs:
            progress += 1
            progress.show_progress()
            n['closeness'] = g.closeness(n)
        g.save('%s_with_closeness.gml' % arguments.network)
        nodes = [(x.index, x['closeness']) for x in g.vs]
    nodes = sorted(nodes, reverse=True, key=lambda x: x[1])
    top_nodes = set(x[0] for x in nodes[:end])
    purify(g, meta, top_nodes, arguments.progressbar)
def main():
    """Generate shortest paths between random endpoint pairs via purify().

    CLI: NETWORK OUT [--route-count N] plus the common parser options.
    """
    parser = argparse.ArgumentParser(
        description='Generate shortest path between random endpoins',
        parents=[
            argparse_general.commonParser,
        ],
        **argparse_general.commonParams)
    parser.add_argument('network')
    parser.add_argument('out')
    parser.add_argument('--route-count', type=int, default=1000,
                        dest='route_count')
    arguments = parser.parse_args()
    g = helpers.load_network(arguments.network)
    out = arguments.out
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    purify(g, out, arguments.route_count, arguments.progressbar)
def main():
    """Calculate closeness values for a network and save the annotated graph.

    Thin CLI wrapper: parses NETWORK and OUT, sets the log level, then
    delegates all work to purify().
    """
    parser = argparse.ArgumentParser(
        parents=[
            argparse_general.commonParser,
        ],
        description='Calculate closeness values and save the graph with them',
        **argparse_general.commonParams)
    parser.add_argument(
        'network',
        help='Input network. Use any format which compatible with igraph')
    parser.add_argument(
        'out',
        help=
        'File path to save the graph with closeness values. GML extension required.'
    )
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    out = arguments.out
    purify(g, out)
def main():
    """Plot the first few traces against their shortest paths.

    Takes a fixed random sample of 40 edges as background, then for each of
    the first 15 metadata entries with at least 3 hops plots the original
    trace and one shortest path via pretty_plot().
    """
    parser = argparse.ArgumentParser(
        description='Pretty plot',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.meta)
    edges = [(e.source, e.target) for e in g.es]
    rand_edges = random.sample(edges, 40)
    progress = progressbar1.DummyProgressBar(end=10, width=15)
    if arguments.progressbar:
        progress = progressbar1.AnimatedProgressBar(end=len(meta), width=15)
    for m in meta[:15]:
        progress += 1
        progress.show_progress()
        trace = m[helpers.TRACE]
        if len(trace) < 3:
            continue
        s, t = trace[0], trace[-1]
        sh = g.get_shortest_paths(s, t)[0]
        pretty_plot(g, trace, sh, rand_edges)
def main():
    """Dispatch the requested statistics generators over a network + metadata.

    Each ``--*-stat`` style flag triggers an independent report written
    under the (slash-trimmed) output folder.
    """
    parser = argparse.ArgumentParser(
        description='Display statistical informations',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('network')
    parser.add_argument('metadata')
    parser.add_argument('out', type=str, help='folder path for results')
    parser.add_argument('--stretch-stat', dest='stretch_stat',
                        action='store_true')
    parser.add_argument('--eye-stat', dest='eye_stat', action='store_true')
    parser.add_argument('--eye-stat-basic', dest='eye_stat_basic',
                        action='store_true')
    parser.add_argument('--ba-stat', dest='ba_stat', nargs='+')
    parser.add_argument('--degree-dist', dest='degree_dist',
                        action='store_true')
    parser.add_argument('--simple-load', dest='simple_load',
                        action='store_true')
    parser.add_argument('--load2d')
    parser.add_argument('--stats', dest='stats', action='store_true')
    parser.add_argument('--upwalk')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    arguments = parser.parse_args()
    # FIX: clamp to the last valid index; min(len(LEVELS), v) could still
    # index one past the end of helpers.LEVELS and raise IndexError.
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    g = helpers.load_network(arguments.network)
    meta = helpers.load_from_json(arguments.metadata)
    out_folder_path = arguments.out
    if out_folder_path.endswith('/'):
        out_folder_path = out_folder_path[:-1]
    if arguments.stretch_stat:
        logger.info('Generate stretch statistics')
        stretch_stat(meta, out_folder_path)
    if arguments.eye_stat:
        logger.info('Generate Eye statistics')
        eye_stat(meta, out_folder_path)
    if arguments.eye_stat_basic:
        logger.info('Generate Basic Eye statistics')
        eye_stat_basic(meta, out_folder_path)
    if arguments.ba_stat:
        logger.info('Generate Barabasi-Albert statistics')
        ba_stat(meta, arguments.ba_stat, out_folder_path)
    if arguments.degree_dist:
        logger.info('Generate degree distributions')
        degree_distribution_stat(g, out_folder_path)
    if arguments.load2d:
        logger.info('Generate load stat')
        tr = helpers.load_from_json(arguments.load2d)
        load2d(g, tr, out_folder_path)
    if arguments.simple_load:
        logger.info('Generate simple load based on meta')
        simple_load(g, meta, out_folder_path)
    if arguments.stats:
        logger.info('Stat gen')
        stats(g, meta)
    if arguments.upwalk:
        logger.info('Upwalk')
        upwalk(arguments.upwalk, out_folder_path)
def main():
    """Synthetic (valley-free) route generator with closeness-error injection.

    Two error modes, selected by --toggle-node-error-mode:
      * node mode: core nodes (closeness >= core_limit percentile) get a new
        random closeness in [closeness_error, old closeness];
      * edge mode (default): directed labels of core-to-core edges are
        randomly flipped with probability closeness_error.
    For every (source, target) pair taken from the metadata traces, a
    valley-free shortest candidate route is picked in the relabeled graph
    and compared with the plain shortest path; all four trace variants and
    stretch statistics are written out as JSON.
    """
    parser = argparse.ArgumentParser(
        description=('Syntetic route generator'),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--progressbar', action='store_true')
    parser.add_argument('--verbose', '-v', action='count', default=0)
    parser.add_argument('network')
    parser.add_argument('meta')
    parser.add_argument('all_trace_out', metavar='all-trace-out')
    parser.add_argument('syntetic_out', metavar='syntetic-out')
    parser.add_argument('--trace-count', '-tc', type=int, dest='trace_count',
                        default=5000)
    parser.add_argument('--random-sample', dest='random_sample',
                        action='store_true')
    parser.add_argument('--closeness-error', '-ce', type=float,
                        dest='closeness_error', default=0.0)
    parser.add_argument('--core-limit-percentile', '-cl', type=int,
                        dest='core_limit', default=0)
    parser.add_argument('--toggle-node-error-mode', action='store_true')
    arguments = parser.parse_args()
    arguments.verbose = min(len(helpers.LEVELS) - 1, arguments.verbose)
    logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose])
    show_progress = arguments.progressbar
    g = helpers.load_network(arguments.network)
    g = g.simplify()
    meta = helpers.load_from_json(arguments.meta)
    if arguments.random_sample:
        random.shuffle(meta)
    meta = meta[:arguments.trace_count]
    N = g.vcount()
    # Closeness distribution summary (descending order).
    cl = sorted([x['closeness'] for x in g.vs], reverse=True)
    logger.info('Min closeness: %s' % np.min(cl))
    logger.info('Max closeness: %s' % np.max(cl))
    logger.info('Mean closenss: %s' % np.mean(cl))
    logger.info('10%% closeness: %s' % np.percentile(cl, 10))
    logger.info('90%% closeness: %s' % np.percentile(cl, 90))
    logger.info('Core limit: [r]%d%%[/]' % arguments.core_limit)
    change_probability = 100 * arguments.closeness_error
    logger.info('Change probability: [r]%6.2f%%[/]' % change_probability)
    # Percentile value separating "core" nodes from the rest.
    core_limit = np.percentile(cl, arguments.core_limit)
    logger.info('Core limit in closeness: [bb]%f[/]' % core_limit)
    if arguments.toggle_node_error_mode:
        # Node error mode: perturb the closeness of core nodes directly.
        logger.info('[r]Node error mode[/]')
        msg = ("If given node's closeness >= core_limit then the new ",
               "closeness in this node is ",
               #"rand(closeness_error ... old closeness)"
               "OLD_CLOSENSS * +/- closeness_error%")
        logger.info(''.join(msg))
        logger.info('Minimum node closeness: [g]%f[/]' %
                    arguments.closeness_error)
        for n in g.vs:
            if n['closeness'] < core_limit:
                continue
            # sign = -1 if random.uniform(-1, 1) < 0 else 1
            # n['closeness'] = n['closeness'] * (1 + sign * arguments.closeness_error)
            new_closeness = random.uniform(arguments.closeness_error,
                                           n['closeness'])
            n['closeness'] = new_closeness
    # Label edge directions (U/D/P) from the (possibly perturbed) closeness.
    g_labeled = vft.label_graph_edges(g, vfmode=vft.CLOSENESS)
    peer_edge_count = len([x for x in g_labeled.es if x['dir'] == LinkDir.P])
    logger.info('Peer edge count: %d' % peer_edge_count)
    changed_edges = []
    if not arguments.toggle_node_error_mode:
        # Edge error mode: randomly flip direction labels of core-core edges.
        msg = ("If the closeness values of the endpoints in given edge is ",
               "larger than the core_limit and ",
               "random(0,1) < closeness_error then change the direction ",
               "for this edge")
        logger.info(''.join(msg))
        changed_u = changed_d = 0
        changed_edges = []
        changed_edgess = []
        for edge in g_labeled.es:
            s, t = edge.source, edge.target
            s_cl = g_labeled.vs[s]['closeness']
            t_cl = g_labeled.vs[t]['closeness']
            if (s_cl < core_limit or t_cl < core_limit):
                continue
            if random.uniform(0, 1) > arguments.closeness_error:
                continue
            # if abs(s_cl - t_cl) / min(s_cl, t_cl) > 0.02: continue
            new_edge_dir = (LinkDir.U
                            if random.uniform(0, 1) > 0.5 else LinkDir.D)
            if new_edge_dir != edge['dir']:
                if edge['dir'] == LinkDir.U:
                    changed_u += 1
                else:
                    changed_d += 1
                edge['dir'] = new_edge_dir
                changed_edges.append(edge)
                changed_edgess.append((edge.source, edge.target))
        # if edge['dir'] == LinkDir.U:
        #     changed_u += 1
        #     changed_edgess.append((edge.source, edge.target))
        #     edge['dir'] = LinkDir.D
        #     changed_edges.append(edge)
        # elif edge['dir'] == LinkDir.D:
        #     changed_d += 1
        #     changed_edgess.append((edge.source, edge.target))
        #     edge['dir'] = LinkDir.U
        #     changed_edges.append(edge)
        logger.info('E count: %d' % g_labeled.ecount())
        logger.info('Changed U: %d' % changed_u)
        logger.info('Changed D: %d' % changed_d)
        logger.info('Changed: %d' % (changed_d + changed_u))
    # Name pairs of flipped edges, symmetric closure, for "affected" checks.
    changed_e = [(g_labeled.vs[e.source]['name'],
                  g_labeled.vs[e.target]['name']) for e in changed_edges]
    changed_e = changed_e + [(x[1], x[0]) for x in changed_e]
    changed_e = set(changed_e)
    vf_g_closeness = vft.convert_to_vf(g, vfmode=vft.CLOSENESS,
                                       labeled_graph=g_labeled)
    # e_colors = []
    # for e in vf_g_closeness.es:
    #     if e.source < N and e.target < N: col = 'grey'
    #     elif e.source < N and e.target >= N: col = 'blue'
    #     elif e.source >= N and e.target >= N: col = 'red'
    #     else: col = 'cyan'
    #     e_colors.append(col)
    # igraph.plot(vf_g_closeness, "/tmp/closeness.pdf",
    #             vertex_label=vf_g_closeness.vs['name'],
    #             vertex_size=0.2,
    #             edge_color=e_colors)
    # (source index, target index, original trace) for every metadata entry.
    pairs = [(g.vs.find(x[helpers.TRACE][0]).index,
              g.vs.find(x[helpers.TRACE][-1]).index,
              tuple(x[helpers.TRACE])) for x in meta]
    # pairs = list(set(pairs))
    # random.shuffle(pairs)
    # visited = set()
    # pairs2 = []
    # for x in pairs:
    #     k = (x[0], x[1])
    #     if k in visited: continue
    #     visited.add(k)
    #     visited.add((k[1], k[0]))
    #     pairs2.append(x)
    # pairs = pairs2
    traces = [x[2] for x in pairs]
    stretches = []
    syntetic_traces = []
    sh_traces = []
    base_traces = []
    original_traces = []
    bad = 0  # routes touching a flipped edge or with stretch > 2
    progress = progressbar1.DummyProgressBar(end=10, width=15)
    if show_progress:
        progress = progressbar1.AnimatedProgressBar(end=len(pairs), width=15)
    for s, t, trace_original in pairs:
        progress += 1
        progress.show_progress()
        trace_original_idx = [g.vs.find(x).index for x in trace_original]
        logger.debug('Original trace: %s -- %s -- %s',
                     [g.vs[x]['name'] for x in trace_original_idx],
                     vft.trace_to_string(g, trace_original_idx,
                                         vft.CLOSENESS),
                     [g.vs[x]['closeness'] for x in trace_original_idx])
        sh_routes = g.get_all_shortest_paths(s, t)
        sh_len = len(sh_routes[0])
        sh_routes_named = [[g.vs[y]['name'] for y in x] for x in sh_routes]
        sh_trace_name = random.choice(sh_routes_named)
        base_trace_name = random.choice(sh_routes_named)
        # VF candidates via the expanded VF graph (+N maps to its upper layer).
        candidates = vf_g_closeness.get_all_shortest_paths(s + N, t + N)
        candidates = [vft.vf_route_converter(x, N) for x in candidates]
        # candidates = []
        if len(candidates) == 0:
            # Fallback: explicit VF shortest route search.
            candidates = vft.get_shortest_vf_route(g_labeled, s, t,
                                                   mode='vf',
                                                   vf_g=vf_g_closeness,
                                                   _all=True,
                                                   vfmode=vft.CLOSENESS)
        if len(candidates) == 0:
            s_name, t_name = g.vs[s]['name'], g.vs[t]['name']
            logger.debug("!!!No syntetic route from %s to %s" %
                         (s_name, t_name))
            continue
        logger.debug('Candidates from %s to %s:' %
                     (g.vs[s]['name'], g.vs[t]['name']))
        for c in candidates:
            logger.debug('%s -- %s -- %s' %
                         ([g.vs[x]['name'] for x in c],
                          vft.trace_to_string(g_labeled, c, vft.PRELABELED),
                          [g.vs[x]['closeness'] for x in c]))
        chosen_one = random.choice(candidates)
        chosen_one_name = [g.vs[x]['name'] for x in chosen_one]
        # print chosen_one
        # print trace_original
        # pretty_plotter.pretty_plot(g, trace_original_idx,
        #                            chosen_one, changed_edgess,
        #                            spec_color=(0, 0, 0, 155))
        hop_stretch = len(chosen_one) - sh_len
        stretches.append(hop_stretch)
        # Edge lists to check overlap with the flipped-edge set.
        trace_original_e = zip(trace_original, trace_original[1:])
        chosen_one_e = zip(chosen_one_name, chosen_one_name[1:])
        trace_affected = any([x in changed_e for x in trace_original_e])
        chosen_affected = any([x in changed_e for x in chosen_one_e])
        logger.debug('Trace affected: %s' % trace_affected)
        logger.debug('Chosen affected: %s' % chosen_affected)
        # if hop_stretch > 2:
        #     logger.debug('Base: %s' % trace_to_string(g_labeled, base_trace_name))
        #     logger.debug('SH: %s' % trace_to_string(g_labeled, sh_trace_name))
        #     logger.debug('Trace: %s' % trace_to_string(g_labeled, trace_original))
        #     logger.debug('Syntetic: %s' % trace_to_string(g_labeled, chosen_one_name))
        if trace_affected or chosen_affected or hop_stretch > 2:
            # pretty_plotter.pretty_plot_all(g, traces,
            #                                chosen_one, changed_edgess,
            #                                spec_color=(0, 0, 0, 255))
            bad += 1
        syntetic_traces.append(chosen_one_name)
        sh_traces.append(sh_trace_name)
        base_traces.append(base_trace_name)
        original_traces.append(trace_original)
        logger.debug('From %s to %s chosen one %s' %
                     (g.vs[s]['name'], g.vs[t]['name'], chosen_one_name))
    result = zip(base_traces, sh_traces, original_traces, syntetic_traces)
    helpers.save_to_json(arguments.all_trace_out, result)
    helpers.save_to_json(arguments.syntetic_out, syntetic_traces)
    print 'Bad: %d' % bad
    # Hop-stretch distribution over the successfully generated routes.
    c = collections.Counter(stretches)
    trace_count = len(syntetic_traces)
    logger.info('Stretch dist:')
    for k in c:
        logger.info('\t%d: %5.2f%%[%d]' %
                    (k, 100 * c[k] / float(trace_count), c[k]))
    logger.info('Valid route count: %d' % trace_count)
    logger.info('Route count parameter: %d' % arguments.trace_count)
    logger.info('Generated valid pair count: %d' % len(pairs))
def main(): parser = argparse.ArgumentParser( description=('SANDBOX mode. ', 'Write something ', 'useful here'), formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--progressbar', action='store_true') parser.add_argument('--verbose', '-v', action='count', default=0) parser.add_argument('--edge-drop', dest='edge_drop', type=float, default=0.0) parser.add_argument('--closeness-limit', dest='closeness_limit', type=float, default=0.0) parser.add_argument('network') parser.add_argument('traceroutes') arguments = parser.parse_args() show_progress = arguments.progressbar arguments.verbose = min(len(helpers.LEVELS), arguments.verbose) logging.getLogger('compnet').setLevel(helpers.LEVELS[arguments.verbose]) g = helpers.load_network(arguments.network) traceroutes = helpers.load_from_json(arguments.traceroutes) logger.info('ecount: %d' % g.ecount()) logger.info('vcount: %d' % g.vcount()) logger.info('trace count: %d' % len(traceroutes)) g_dummy = g.copy() progress = progressbar1.DummyProgressBar(end=10, width=15) if show_progress: progress = progressbar1.AnimatedProgressBar(end=len(traceroutes), width=15) closeness_list = [] for x in g_dummy.vs: progress += 1 progress.show_progress() closeness_list.append((x.index, g_dummy.closeness(x))) end = int(arguments.closeness_limit * g_dummy.vcount()) logger.debug('Top node count: %d' % end) top_nodes = sorted(closeness_list, key=lambda x: x[1], reverse=True)[:end] top_nodes_index = [x[0] for x in top_nodes] top_nodes_name = [g_dummy.vs[x[0]]['name'] for x in top_nodes] top_edges = [ e for e in g_dummy.es if e.source in top_nodes_index and e.target in top_nodes_index ] logger.debug('Top edge count: %d' % len(top_edges)) random.shuffle(top_edges) edge_drop = top_edges[:int(len(top_edges) * arguments.edge_drop)] logger.debug('Dropped edge count: %d' % len(edge_drop)) # edges = [x.index for x in g_dummy.es] # random.shuffle(edges) # edge_drop = edges[:int(g.ecount() * arguments.edge_drop)] 
g_dummy.delete_edges(edge_drop) traceroutes = traceroutes[:10000] all_edges = [] for trace in traceroutes: edges = zip(trace, trace[1:]) edges = [tuple(sorted(e)) for e in edges] all_edges.extend(edges) all_edges = list(set(all_edges)) top_edges = [ e for e in all_edges if e[0] in top_nodes_name and e[1] in top_nodes_name ] logger.info('TOP edge count in real traceroutes: %d' % len(top_edges)) found_top_edges = [] increments = [] for trace in traceroutes: edges = zip(trace, trace[1:]) edges = [tuple(sorted(e)) for e in edges] top_edges = [ x for x in edges if x[0] in top_nodes_name and x[1] in top_nodes_name ] found_top_edges.extend(top_edges) found_top_edges = list(set(found_top_edges)) increments.append(len(found_top_edges)) logger.info('Found top edge count: %d' % len(found_top_edges)) dummy_sh_traceroutes_meta = [] original_sh_traceroutes_meta = [] stretches = [] progress = progressbar1.DummyProgressBar(end=10, width=15) if show_progress: progress = progressbar1.AnimatedProgressBar(end=len(traceroutes), width=15) for trace in traceroutes: progress += 1 progress.show_progress() s, t = trace[0], trace[-1] # logger.debug('Get shortest paths from {s} to {t}'.format(s=s, t=t)) sh_dummy = random.choice(g_dummy.get_shortest_paths(s, t)) sh_original = random.choice(g.get_shortest_paths(s, t)) stretch = len(sh_dummy) - len(sh_original) dummy_sh_traceroutes_meta.append((sh_dummy, stretch)) original_sh_traceroutes_meta.append((sh_original, 0)) stretches.append(stretch) # logger.debug('Stretch: %d' % stretch) # logger.debug('SH DUMMY: %s' % [g_dummy.vs[x]['name'] for x in sh_dummy]) # logger.debug('SH ORIG: %s' % [g.vs[x]['name'] for x in sh_original]) dummy_sh_meta = [(x[0], x[1], vft.is_valley_free(g_dummy, x[0], vft.CLOSENESS)) for x in dummy_sh_traceroutes_meta] dummy_sh_len_hist = collections.Counter( [len(x[0]) for x in dummy_sh_traceroutes_meta]) original_sh_len_hist = collections.Counter( [len(x[0]) for x in original_sh_traceroutes_meta]) original_len_hist = 
collections.Counter([len(x) for x in traceroutes]) stretches = [x for x in stretches if x >= 0] stretch_hist = collections.Counter(stretches) import matplotlib.pyplot as plt print print[(x, 100 * y / float(len(traceroutes)), y) for x, y in stretch_hist.iteritems()] plt.plot([x for x in stretch_hist.iterkeys()], [x for x in stretch_hist.itervalues()], 'g^') plt.ylabel('some numbers') # plt.show() logger.info('Dummy VF stat') max_stretch = max(dummy_sh_meta, key=lambda x: x[1])[1] for stretch in range(0, max_stretch + 1): stretched_traces = [x for x in dummy_sh_meta if x[1] == stretch] count = len(stretched_traces) vf_count = len([x for x in stretched_traces if x[2]]) vf_perc = vf_count / float(count) nonvf_count = count - vf_count nonvf_perc = nonvf_count / float(count) logger.info( '{stretch} -- {vf_perc}[{vf_count}]\t{nonvf_perc}[{nonvf_count}]'. format(stretch=stretch, vf_perc=vf_perc, vf_count=vf_count, nonvf_perc=nonvf_perc, nonvf_count=nonvf_count)) import matplotlib.pyplot as plt plt.plot(increments, 'g^') plt.ylabel('some numbers') plt.show()
def wrap_watts_converter(args): g = helpers.load_network(args.source_network) watts_g = watts_converter(g, args.progressbar) watts_g.save(args.target)