import json
import random
from datetime import datetime
from pathlib import Path
from random import randint, seed

import networkx as nx
from networkx.algorithms import bipartite

# Project-local helpers (data_to_json, Instance, InstanceMaker, Table, and the
# InstanceMaker exceptions DuplicateTiles, TileTooSmall, TooFewCommonSymbols,
# TooManyCommonSymbols) are imported from elsewhere in this repository.


def random_bipartite_graph_generation_and_dump_in_json():
    nb_sommets = 10
    seed = 29
    n1 = random.randint(1, nb_sommets - 1)
    n2 = nb_sommets - n1
    p = 0.85
    print("n1 =", n1)
    print("n2 =", n2)
    g = bipartite.random_graph(n1, n2, p, seed, directed=False)
    V1, V2 = bipartite.sets(g)
    graph_for_json = {
        "seed": seed,
        "graph_type": "bipartite",
        "total_node_count": len(g.nodes),
        "V1_node_count": len(V1),
        "V2_node_count": len(V2),
        "edge_count": len(g.edges),
        "nodes": sorted(g.nodes),
        "edges": list(g.edges),
    }
    with open("test_dumps/random_bipartite_1.json", "w", encoding="utf8") as json_file:
        json_file.write(data_to_json(graph_for_json))
def complete_bipartite_graph_generation_and_dump_in_json():
    nb_sommets = 15
    seed = 29  # recorded in the dump; the complete graph itself is deterministic
    n1 = random.randint(1, nb_sommets - 1)
    n2 = nb_sommets - n1
    g = bipartite.complete_bipartite_graph(n1, n2)
    V1, V2 = bipartite.sets(g)
    graph_for_json = {
        "seed": seed,
        "graph_type": "bipartite",
        "total_node_count": len(g.nodes),
        "V1_node_count": len(V1),
        "V2_node_count": len(V2),
        "edge_count": len(g.edges),
        "nodes": sorted(g.nodes),
        "edges": list(g.edges),
    }
    with open("test_dumps/complete_bipartite_1.json", "w", encoding="utf8") as json_file:
        json.dump(graph_for_json, json_file, indent=4)  # Ugly indents
    with open("test_dumps/complete_bipartite_2.json", "w", encoding="utf8") as json_file:
        json_file.write(data_to_json(graph_for_json))  # Nice indents
def test_data_to_json():
    # raw_dump and improved_dump are expected string fixtures defined at module
    # level elsewhere in this test file.
    data = {
        "height": 4,
        "symbol_weight_bound": 5,
        "symbol_count": 5,
        "symbol_weights": [4, 5, 3, 4, 1],
        "tiles": [[0, 1, 2, 4], [0, 1, 3]],
    }
    assert raw_dump == json.dumps(data, indent=2)
    assert improved_dump == data_to_json(data)
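# --- Hypothetical sketch of data_to_json (the real implementation lives
# --- elsewhere in this project and is what the test above actually pins down).
# --- Assumption, inferred from the "Ugly indents" / "Nice indents" comments:
# --- it pretty-prints like json.dumps with indentation, but keeps the
# --- innermost lists on a single line.
import re


def data_to_json_sketch(data):
    """Illustrative stand-in: indented JSON with innermost lists kept on one line."""
    text = json.dumps(data, indent=2)
    # Collapse whitespace inside lists that contain no nested structure.
    return re.sub(
        r"\[[^\[\]{}]*\]",
        lambda match: " ".join(match.group(0).split()),
        text,
    )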
def complete_generation(tailles, nb_instances, chemin_pour_stockage):
    graph_type = "complete"  # was undefined; chosen to match the other generators' naming
    for nb_sommets in tailles:
        for i in range(1, nb_instances + 1):
            g = nx.complete_graph(nb_sommets)
            graph_for_json = {
                "seed": "None",
                "graph_type": graph_type,
                "total_node_count": len(g.nodes),
                "edge_count": len(g.edges),
                "nodes": sorted(g.nodes),
                "edges": list(g.edges),
            }
            name = (chemin_pour_stockage + graph_type + "__n=" + str(nb_sommets)
                    + "__" + "{:03}".format(i) + ".json")
            with open(name, "w", encoding="utf8") as json_file:
                json_file.write(data_to_json(graph_for_json))
def bipartite_generation(tailles, nb_instances, chemin_pour_stockage):
    # We need a dict to count the number of bipartite graphs for each density.
    print("!!! WORK IN PROGRESS !!!")
    graph_type = "bipartite"  # was undefined; matches the naming used elsewhere
    for nb_sommets in tailles:
        for i in range(1, nb_instances + 1):
            n1 = random.randint(1, nb_sommets - 1)
            n2 = nb_sommets - n1
            p = 0.5  # TODO: we still have to define this value
            g = bipartite.random_graph(n1, n2, p, i, directed=False)
            if not nx.is_connected(g):
                # The bipartite graph is not connected; we will have to add edges.
                print("YOU STILL HAVE WORK TO DO")
            V1, V2 = bipartite.sets(g)
            graph_for_json = {
                "seed": i,
                "graph_type": graph_type,
                "total_node_count": len(g.nodes),
                "V1_node_count": len(V1),
                "V2_node_count": len(V2),
                "edge_count": len(g.edges),
                "nodes": sorted(g.nodes),
                "edges": list(g.edges),
            }
            # TODO: compute the rounded density and update the counter keyed by
            # it (a possible helper is sketched after this function).
            d = 0
            name = (chemin_pour_stockage + graph_type + "__n=" + str(nb_sommets)
                    + "__d=" + str(d) + "__" + "{:03}".format(i) + ".json")
            with open(name, "w", encoding="utf8") as json_file:
                json_file.write(data_to_json(graph_for_json))
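# --- Hypothetical helper for the density TODO above (not in the original
# --- code). Assumption: for a bipartite graph with parts V1 and V2, the
# --- density is edge_count / (|V1| * |V2|), rounded to one decimal so it can
# --- key the per-density counter mentioned at the top of bipartite_generation.
def rounded_bipartite_density(g, V1, V2):
    """Return the bipartite density of g, rounded to one decimal place."""
    return round(len(g.edges) / (len(V1) * len(V2)), 1)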
def dump_instances(config_path):
    cfg = json.loads(Path(config_path).read_text())
    directory = Path(cfg["output_directory"])
    directory.mkdir(parents=True, exist_ok=True)
    seed(cfg["seed"])
    i = 1
    column = 1
    while i <= cfg["instance_count"]:
        height = randint(cfg["min_height"], cfg["max_height"])
        maker = InstanceMaker(
            height,
            cfg["arity"],
            cfg["tile_min_size"],
            cfg["common_symbol_min_count"],
            cfg["common_symbol_max_count"],
        )
        leaf_rate = randint(cfg["min_tile_percentage"], cfg["max_tile_percentage"]) / 100.0
        kill_rate = randint(cfg["min_kill_percentage"], cfg["max_kill_percentage"]) / 100.0
        symbol_weight_bound = randint(cfg["min_symbol_weight_bound"], cfg["max_symbol_weight_bound"])
        try:
            instance = maker(leaf_rate, kill_rate, symbol_weight_bound)
        except DuplicateTiles:
            outcome = "D"
        except TileTooSmall:
            outcome = "S"
        except TooFewCommonSymbols:
            outcome = "F"
        except TooManyCommonSymbols:
            outcome = "M"
        else:
            (directory / instance["name"]).write_text(data_to_json(instance))
            outcome = "."
            i += 1  # only successfully generated instances count toward the total
        print(outcome, end="" if column % 80 else "\n", flush=True)
        column += 1
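# --- Example configuration for dump_instances(), assembled only from the keys
# --- the function reads above. All values are illustrative, not from the repo.
EXAMPLE_CONFIG = {
    "output_directory": "instances",
    "seed": 42,
    "instance_count": 100,
    "min_height": 3,
    "max_height": 6,
    "arity": 2,
    "tile_min_size": 2,
    "common_symbol_min_count": 1,
    "common_symbol_max_count": 5,
    "min_tile_percentage": 20,
    "max_tile_percentage": 80,
    "min_kill_percentage": 0,
    "max_kill_percentage": 50,
    "min_symbol_weight_bound": 1,
    "max_symbol_weight_bound": 10,
}
# e.g.: Path("config.json").write_text(json.dumps(EXAMPLE_CONFIG, indent=2))
#       dump_instances("config.json")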
def arbitrary_generation(tailles, densites, nb_instances, chemin_pour_stockage):
    graph_type = "arbitrary"
    for nb_sommets in tailles:
        for d in densites:
            # Invert d = 2m / (n(n-1)) to get an integral edge count for density d.
            m = round(d * nb_sommets * (nb_sommets - 1) / 2)
            for i in range(1, nb_instances + 1):
                g = nx.gnm_random_graph(nb_sommets, m, seed=i)
                graph_for_json = {
                    "seed": i,
                    "graph_type": graph_type,
                    "total_node_count": len(g.nodes),
                    "edge_count": len(g.edges),
                    "nodes": sorted(g.nodes),
                    "edges": list(g.edges),
                }
                name = (chemin_pour_stockage + graph_type + "__n=" + str(nb_sommets)
                        + "__d=" + str(d) + "__" + "{:03}".format(i) + ".json")
                with open(name, "w", encoding="utf8") as json_file:
                    json_file.write(data_to_json(graph_for_json))
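# --- Illustrative call (argument values are examples, not from the repo):
# --- dump three arbitrary graphs for every (size, density) pair under test_dumps/.
# arbitrary_generation([10, 20], [0.25, 0.5], 3, "test_dumps/")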
def random_graph_generation_and_dump_in_json():
    nb_sommets = 15
    d = 0.5
    m = round(d * nb_sommets * (nb_sommets - 1) / 2)  # integral edge count for density d
    seed = 29
    graph_type = "normal"
    g = nx.gnm_random_graph(nb_sommets, m, seed)
    graph_in_data = {
        "seed": seed,
        "graph_type": graph_type,
        "total_node_count": len(g.nodes),
        "edge_count": len(g.edges),
        "nodes": sorted(g.nodes),
        "edges": list(g.edges),
    }
    name = "test_dumps/random_graph_" + str(nb_sommets) + "_vertices.json"
    with open(name, "w", encoding="utf8") as json_file:
        json_file.write(data_to_json(graph_in_data))
def run_one_config(self):
    print(f"Input: {self.input_dir}")
    print(f"Output: {self.output_dir}")
    print()
    table = Table(self)
    table.add_column("#", attribute="i", width=3)
    table.add_column("time", attribute="now", width=8)
    table.add_column("name", attribute="instance_name", width=27, align="left")
    table.add_column("step count", attribute="step_count", width=10)
    if self.existing_strategy != "nothing":
        table.add_column("ratio", attribute="step_count_delta", width=8)
    table.add_column("best", attribute="c_max", width=5)
    if self.existing_strategy != "nothing":
        table.add_column("ratio", attribute="c_max_delta", width=8)
    table.add_column("duration", align="left", width=21)
    table.print_header()
    files = sorted(self.input_dir.glob("*.json"))
    for (self.i, instance_path) in enumerate(files[self.start:self.stop], self.start):
        instance = Instance(instance_path)
        self.now = datetime.now().isoformat(timespec="seconds").partition("T")[2]
        self.instance_name = instance.name.replace(".json", "")
        self.step_count = "N/A"
        self.step_count_delta = "N/A"
        self.c_max = "N/A"
        self.c_max_delta = "N/A"
        self.duration = "N/A"
        previous_results = {}
        output_path = self.output_dir / instance.name
        if output_path.exists():
            previous_results.update(json.loads(output_path.read_text()))
            if self.existing_strategy == "nothing":
                self.step_count = previous_results["step_count"]
                self.c_max = previous_results["c_max"]
                magnitude = previous_results["duration_magnitude"]
                self.duration = f"previously in ~1e{magnitude} s."
                table.print_row()
                continue
        elif self.non_existing_strategy == "nothing":
            table.print_row()
            continue
        print_half_row = table.print_half_row("name")
        next(print_half_row)
        results = self.solve_one(instance)
        self.step_count = results["step_count"]
        self.c_max = results["c_max"]
        self.duration = f"solved in {results['elapsed_time']:.2e} s."
        if previous_results:
            self.step_count_delta = self.ratio(previous_results["step_count"], self.step_count)
            self.c_max_delta = self.ratio(previous_results["c_max"], self.c_max)
        next(print_half_row)
        if previous_results and self.existing_strategy == "dry":
            continue
        if not previous_results and self.non_existing_strategy == "dry":
            continue
        report = {
            "solver_parameters": self.solver_parameters,
            "duration_magnitude": results["duration_magnitude"],
            "c_max": results["c_max"],
            "solution": results["solution"],
            "step_count": results["step_count"],
            "log": results["log"],
        }
        output_path.write_text(data_to_json(report))
    print()
    if self.solved_instance_count:
        print(f"{self.solved_instance_count} instances solved", end=" ")
        print(f"in {round(self.total_elapsed_time, 2)} seconds.")
    else:
        print(f"All {self.i + 1} instances already solved.")
    print()
def get_json(self) -> str:
    return data_to_json(self.get_data())