def _find_nof_links(self):
    """Record per-topology link/node counts and log the overall extremes.

    Populates self.nof_links_for_topology and self.nof_nodes_for_topology
    keyed by topology name, then logs MIN/MAX over all topologies scanned.
    """
    # Running extremes; sentinels are wide enough for any realistic count.
    fewest_links, most_links = 100000000, -100000000
    fewest_nodes, most_nodes = 100000000, -100000000
    for entry in self.data:
        if "topology" not in entry:
            continue
        topo = entry["topology"]
        links = topo["nof_links"]
        nodes = topo["nof_nodes"]
        self.nof_links_for_topology[topo["name"]] = links
        self.nof_nodes_for_topology[topo["name"]] = nodes
        # NOTE(review): this 'break' stops the scan entirely at the first
        # "isp" entry (after recording its counts but before the min/max
        # update) — presumably isp is meant to be excluded from the
        # extremes; confirm 'continue' was not intended instead.
        if topo["name"] in ["isp"]:
            break
        fewest_links = min(fewest_links, links)
        most_links = max(most_links, links)
        fewest_nodes = min(fewest_nodes, nodes)
        most_nodes = max(most_nodes, nodes)
    log.info("Links: MIN {} MAX {}".format(fewest_links, most_links))
    log.info("Nodes: MIN {} MAX {}".format(fewest_nodes, most_nodes))
    self._print_links_nodes("Colt")
    self._print_links_nodes("Uninett2010")
    self._print_links_nodes("Kdl")
    self._print_links_nodes("AS-3549")
def _run_scenarios(scenarios: list, data_file: str, log_file: str):
    """Run every scenario in sequence, logging failures without aborting.

    :param scenarios: list[BasicScenario]
    """
    log.initialize('ERROR', data_log_file=data_file, log_file=log_file,
                   file_level='INFO')
    # Re-seed so parallel worker processes do not share a random stream.
    np.random.seed()
    for scenario in scenarios:
        with log_context(str(scenario)):
            try:
                scenario.run()
            except Exception:
                # One failing scenario must not stop the remaining ones.
                log.error("Exception while running scenario %s",
                          str(scenario), exc_info=True)
    log.info("finished running scenarios")
def _plot_1bc(self):
    """Emit plot_1bi (Colt precision traces) and plot_1cii (ISP time boxplot)."""
    # --- plot_1bi: precision over repetitions for the Colt trace runs ---
    precision_rows = []
    states_by_rep = {}
    for entry in self.data:
        if "precision" in entry and entry["ctx"][0] == "Colt-trace":
            precision_rows.append((entry["ctx"][1], entry["precision"]))
        elif "finished" in entry and entry["ctx"][0] == "Colt-trace":
            states_by_rep[entry["ctx"][1]] = entry["finished"]["num_explored"]
    prec_df = pd.DataFrame(precision_rows, columns=["rep", "precision"])
    sph.new_figure(9, 6)
    self._multi_trace_plot(prec_df, states_by_rep)
    sph.savefig(os.path.join(self.output_dir, "plot_1bi.pdf"))

    # --- plot_1cii: exploration-time boxplot for the (optional) ISP data ---
    time_rows = [("ISP", entry["time-explore"]) for entry in self.data
                 if "time-explore" in entry and entry["ctx"][0] == "isp-trace"]
    if not time_rows:
        # The real ISP dataset is under NDA and may be absent.
        log.warning("skipping plot_1cii as ISP data is missing")
        return
    time_df = pd.DataFrame(time_rows, columns=["net", "time"])
    max_y = 120
    # Values above the y-limit are logged and summarized as a "+N" label.
    log.info("Outliers:\n%s", str(time_df[time_df.time > max_y]))
    nof_greater_max_y = time_df[time_df.time > max_y].count()["net"]
    fig, ax = sph.new_figure(2, 6)
    time_df.boxplot(column="time", by="net", ax=ax, grid=False,
                    flierprops=self.flierprops)
    # NOTE(review): 'b=' was renamed 'visible=' in matplotlib 3.4 and removed
    # in 3.6 — confirm the pinned matplotlib version before upgrading.
    ax.grid(b=True, which='major', axis='y', color='w')
    ax.set_axisbelow(True)
    plt.ylim([0, max_y])
    plt.xlabel("")
    plt.ylabel("time [s]")
    plt.title("")
    fig.suptitle("")
    plt.gcf().text(0.4, 0.93, "+ {}".format(nof_greater_max_y), fontsize=12)
    sph.savefig(os.path.join(self.output_dir, "plot_1cii.pdf"))
args = parser.parse_args()

# Logging verbosity follows the mutually exclusive command-line flags.
if args.debug:
    log.initialize('DEBUG')
elif args.quiet:
    log.initialize('WARNING')
else:
    log.initialize('INFO')

input_file = get_relative_to_working_directory(args.input_file)
query_file = get_relative_to_working_directory(args.query) if args.query else None

# NOTE(review): 'parser' is rebound here from the argparse parser to the
# InputParser; the argparse object is not used past this point.
parser = InputParser(input_file, query_file)
problems = parser.get_problems()
for problem in problems:
    problem.target_precision = args.precision
    explorer = Explorer(problem)
    sol = explorer.explore_all()
    log.info("explored states: {}".format(sol.num_explored))
    log.info("precision: {}".format(sol.p_explored.invert().val()))
    # The property probability is bounded below by the explored mass and
    # above by adding the unexplored remainder.
    p_low = sol.p_property.val()
    p_up = sol.p_property.val() + sol.p_explored.invert().val()
    print("P({}) ∈ [{:8.8f}, {:8.8f}]".format(
        problem.property.get_human_readable(parser.name_resolver),
        p_low, p_up))
# NOTE(review): this chunk begins mid-expression — the argument list below
# completes a scenario-construction/append call whose opening is outside
# this view.
"Xcongest-{}".format(n_flows), as_3549_file, 1.0E-4, 10, n_flows,
    timeout_h=2.0))
# The string literal below is deliberately dead code: the real ISP
# configuration is under NDA and cannot ship; re-enable locally to
# reproduce plot_1cii.
"""
# real ISP configuration (for plot_1cii, under NDA)
isp_file = os.path.join(input_dir, "private", "isp.json")
runner.scenarios.append(RealScenario("isp", "trace", isp_file, 1.0E-4, 10, collect_precision=True))
"""
runner.run_all(args.processes)
if args.analyze:
    log.initialize('INFO')
    log.info("Generating plots for data directory '%s'", input_dir)
    # collect experiment data
    log.info("Collecting data...")
    data = []
    # Experiment logs are one JSON object per line (JSON-lines format).
    for fname in os.listdir(input_dir):
        if fname.startswith("experiment_data") and fname.endswith(".log"):
            fpath = os.path.join(input_dir, fname)
            with open(fpath, 'r') as f:
                for line in f:
                    data.append(json.loads(line))
    # analyze data, create plots
    Analyzer(data, output_dir).analyze()
def _plot_1a(self):
    """Plot exploration time against topology size (plot_1a.pdf).

    Scans the '-default' experiment entries, marks (topology, rep) pairs
    that timed out, logs per-topology timeout counts and worst-case
    imprecision, then plots median and maximum exploration time versus
    number of links on a log-log scale.
    """
    time_rows = []
    prec_rows = []
    is_timeout = set()
    for elem in self.data:
        if "timeout_after_seconds" in elem and elem["ctx"][0].endswith(
                "-default"):
            is_timeout.add((self._get_topo_name(elem), elem["ctx"][1]))
        elif "time-explore" in elem and elem["ctx"][0].endswith("-default"):
            # Hoist the repeated topology-name lookups.
            topo = self._get_topo_name(elem)
            nof_links = self.nof_links_for_topology[topo]
            explore_time = elem["time-explore"]
            # Bucket by link count (renamed from 'range', which shadowed
            # the builtin).
            if nof_links <= 75:
                link_range = "50--75"
            elif nof_links <= 100:
                link_range = "76--100"
            elif nof_links <= 200:
                link_range = "101--200"
            else:
                link_range = "$>$ 200"
            # Timeout entries are assumed to appear before their
            # time-explore entry — TODO confirm against the log writer.
            timeout = (topo, elem["ctx"][1]) in is_timeout
            time_rows.append((elem["ctx"][0], nof_links, link_range,
                              elem["ctx"][1], explore_time, timeout))
        elif "finished" in elem and elem["ctx"][0].endswith("-default"):
            prec_rows.append((elem["ctx"][0], elem["finished"]["precision"]))
    df = pd.DataFrame(time_rows, columns=[
        "experiment", "links", "range", "rep", "time", "is_timeout"
    ])
    df_prec = pd.DataFrame(prec_rows, columns=["experiment", "precision"])
    # count number of timeouts per topology
    df_to = df[["experiment", "is_timeout"]].groupby("experiment").sum()
    log.info("Number of timeouts:\n%s", str(df_to[df_to.is_timeout > 0]))
    # compute the worst-case imprecision
    log.info(
        "Worst imprecision:\n%s",
        str(df_prec[df_prec.precision > 1E-4].groupby("experiment").max()))
    sph.new_figure(11, 5.5)
    plt.axhline(60 * 60, c="gray", lw=1, label="1 h (timeout)")
    df_max = df[["experiment", "time", "links"]].groupby("experiment").max()
    plt.plot("links", "time", "x", data=df_max, markersize=4, mew=0.6,
             label="maximum")
    df_med = df[["experiment", "time",
                 "links"]].groupby("experiment").median()
    plt.plot("links", "time", "+", data=df_med, markersize=4, mew=0.6,
             label="median")
    plt.xlabel("links")
    plt.ylabel("time [s]")
    plt.legend(handletextpad=0.3)
    plt.loglog()
    sph.savefig(os.path.join(self.output_dir, "plot_1a.pdf"))
def _print_links_nodes(self, topo_name):
    """Log the node and link counts recorded for *topo_name*."""
    nodes = self.nof_nodes_for_topology[topo_name]
    links = self.nof_links_for_topology[topo_name]
    log.info("Topology {}: {} nodes, {} links".format(topo_name, nodes, links))
def analyze(self):
    """Run the full analysis pipeline: topology stats, then every plot."""
    if not os.path.exists(self.output_dir):
        os.makedirs(self.output_dir)
    log.info("Collecting topology statistics...")
    self._find_nof_links()
    # Plot steps run in this fixed order; each logs its progress first so
    # long runs are easy to follow.
    steps = (
        ("Generating plot 1a...", self._plot_1a),
        ("Generating plot 1bc...", self._plot_1bc),
        ("Generating plot intro...", self._plot_intro),
        ("Generating plot 2...", self._plot_2),
        ("Generating plot 3...", self._plot_3),
        ("Generating plot 5...", self._plot_5),
    )
    for message, step in steps:
        log.info(message)
        step()