def execute(self):
    """Solve every problem found in the configured input directory.

    Collects the domain/problem files from ``input_dir`` and dispatches
    them to either the single-core or the multi-core executor, depending
    on the ``force_single_core`` setting.

    Returns:
        The results produced by the selected executor.
    """
    workers = self.get_value("max_workers")
    chunk = self.get_value("chunk_size")
    mpi = self.get_value("use_mpi")
    single_core = self.get_value("force_single_core")

    problem_dir = file.get_relative_path(self.get_value("input_dir"),
                                         self._parent_dir)
    problems = file.get_file_list(problem_dir, constants.PROBLEM_FILE_REGEX)
    domains = file.get_file_list(problem_dir, constants.DOMAIN_FILE_REGEX)

    # Exactly one domain file is expected alongside the problem files.
    assert len(domains) == 1
    domain_file = domains[0]

    if single_core:
        return executor.singlecore_execute(self.solve,
                                           (domain_file, problems))

    return executor.multicore_execute(self.solve,
                                      (domain_file, problems),
                                      self.generate_args,
                                      workers, chunk, mpi)
def __init__(self, name, search_param_dict, parent_dir):
    """Initialize the Pyperplan search phase.

    Validates the configured mode and, for the NN-guided modes, resolves
    the model directory (relative to *parent_dir*) and model name from
    the parameter dictionary.
    """
    super(Pyperplan, self).__init__(name, search_param_dict)

    self._mode = search_param_dict["mode"]
    assert self._mode in Pyperplan._MODES

    # Only the neural-network guided modes carry a trained model.
    if self._mode in (Pyperplan._ASTAR_NNPLACT, Pyperplan._GBF_NNPLACT):
        self._model_dir = file.get_relative_path(
            search_param_dict["model_dir"], parent_dir)
        self._model_name = search_param_dict["model_name"]
def get_heuristic(search_param_dict, problem, parent_dir):
    """Instantiate the heuristic named by ``search_param_dict["heuristic"]``.

    Both NN heuristics share the same model-loading parameters, so the
    model directory is resolved once for either of them.

    Raises:
        Exception: if the heuristic name is not recognized.
    """
    heuristic_type = search_param_dict["heuristic"]

    if heuristic_type == "bfs":
        return BFS(problem)

    if heuristic_type in ("nn_rollout", "nn_plact"):
        model_dir = file.get_relative_path(search_param_dict["model_dir"],
                                           parent_dir)
        model_name = search_param_dict["model_name"]
        heuristic_cls = NNRollout if heuristic_type == "nn_rollout" \
            else NNPLACT
        return heuristic_cls(problem, model_dir, model_name)

    raise Exception("Unknown heuristic_type={}".format(heuristic_type))
def execute(self):
    """Generate all evaluation plots for the documents under input_dir."""
    input_dir = file.get_relative_path(self.get_value("input_dir"),
                                       self._parent_dir)
    documents = self.get_documents(input_dir)

    # Cumulative distribution plots over every document.
    cdp = CDP()
    for document in documents:
        cdp.add_data(document)
    cdp.plot_data(input_dir)

    # Problem-independent (aggregate) series over the whole test set.
    nodes_expanded_series, plan_length_series, solved_series, domain, \
        problem_params = Plot.get_wholistic_data(documents)

    # Bucket the problems by their parameters and gather per-bin series.
    bins = Bins(problem_params, self.get_value("num_bins"),
                self._phase_dict.get("bin_filters", {}))
    bins_nodes_expanded, bins_plan_length, bins_solved, \
        solutions_nodes_expanded, solutions_plan_length, solutions_solved = \
        Plot.get_problem_dependent_data(bins, documents)

    # Aggregate plot first, then one plot per bin, then per-bin averages.
    self._plot_box_bar(domain, "aggregate", "aggregate", input_dir,
                       nodes_expanded_series, plan_length_series,
                       solved_series)

    for bin_name in bins_nodes_expanded:
        bin_index = bins.get_index_from_name(bin_name)
        self._plot_box_bar(domain, bin_index, bin_name, input_dir,
                           bins_nodes_expanded[bin_name],
                           bins_plan_length[bin_name],
                           bins_solved[bin_name])

    self._plot_bins(domain, "avg", "avg", input_dir, bins,
                    solutions_nodes_expanded, solutions_plan_length,
                    solutions_solved)
def _execute(self):
    """Train (and optionally evaluate) a neural network on this phase's
    training data, then save the trained model.

    Returns:
        list: always empty; all results are persisted as side effects
        (plots, evaluation output, and the saved model).

    Raises:
        RuntimeError: if ``train_retries`` is not positive, so no network
            was ever constructed.
    """
    max_workers = self.get_value("max_workers")
    chunk_size = self.get_value("chunk_size")
    use_mpi = self.get_value("use_mpi")
    force_single_core = self.get_value("force_single_core")

    self.initialize_directories()

    # Best-effort cleanup of scratch directories from earlier runs.
    # ignore_errors=True attempts all three removals even if one fails;
    # the previous bare `except: pass` stopped at the first failure and
    # also swallowed KeyboardInterrupt/SystemExit.
    for scratch_dir in ("/tmp/train", "/tmp/eval", "/tmp/predict"):
        shutil.rmtree(scratch_dir, ignore_errors=True)

    # Get the training data.
    training_dir = file.get_relative_path(self.get_value("input_dir"),
                                          self._parent_dir)
    training_data = self.get_training_data(training_dir, max_workers,
                                           chunk_size, use_mpi,
                                           force_single_core, None)

    # Form the complete abstract domain by merging the per-chunk domains.
    abstract_domains_iter = itertools.starmap(
        lambda abstract_domain, _: abstract_domain, training_data)
    abstract_domain = AbstractDomain.merge_abstract_domains(
        abstract_domains_iter)
    abstract_domain.initialize_nn_parameters()

    # Remap the training data into the merged domain's encoding.
    remapped_training_data = self.remap_training_data(abstract_domain,
                                                      training_data,
                                                      max_workers,
                                                      chunk_size, use_mpi,
                                                      force_single_core)

    # Train the network, restarting from scratch up to train_retries
    # times until train() reports success.
    train_retries = self.get_value("train_retries")
    nn = None
    train_try = 0
    should_train = True
    while should_train and train_try < train_retries:
        train_try += 1
        # Lazy %-args: the message is only formatted if INFO is enabled.
        logger.info("Training network: Attempt %u/%u",
                    train_try, train_retries)

        nn = NN.get_instance(abstract_domain, self.get_value("nn_type"),
                             self.get_value("nn_name"))
        nn.plot_model(self._base_dir)
        should_train = not nn.train(remapped_training_data,
                                    self.get_value("epochs"),
                                    self.get_value("batch_size"),
                                    self.get_value("shuffle"))[0]

    # Previously a train_retries <= 0 configuration crashed later with an
    # opaque NameError on `nn`; fail here with a clear message instead.
    if nn is None:
        raise RuntimeError(
            "train_retries must be >= 1; no network was trained")

    # Evaluate the network on a held-out test set, if one is configured.
    if self.has_key("evaluate_dir"):
        test_dir = file.get_relative_path(self.get_value("evaluate_dir"),
                                          self._parent_dir)
        test_data = self.get_training_data(test_dir, max_workers,
                                           chunk_size, use_mpi,
                                           force_single_core,
                                           abstract_domain)

        # Keep only the package lists (second element of each pair);
        # lambda parameter renamed so it no longer shadows the outer list.
        test_pkgs_list = []
        test_pkgs_list += itertools.starmap(
            lambda _, pkgs: pkgs, test_data)
        test_pkgs_list = list(flatten(test_pkgs_list))
        nn.evaluate(test_pkgs_list)

    # Save the model.
    nn.save(self._base_dir)
    return []