def find_minimum(model: models.ModelWrapper, optim_config: config.OptimConfig) -> dict:
    optimiser = optim_config.algorithm_type(model.parameters(), **optim_config.algorithm_args)  # type: optim.Optimizer

    # Scheduler
    if optim_config.scheduler_type is not None:
        scheduler = optim_config.scheduler_type(optimiser, **optim_config.scheduler_args)
    else:
        scheduler = None

    # Initialise
    model.initialise_randomly()
    if optim_config.eval_config is not None:
        model.adapt_to_config(optim_config.eval_config)

    # Optimise
    for _ in helper.pbar(range(optim_config.nsteps), "Find minimum"):
        model.apply(gradient=True)
        optimiser.step()
        # Since PyTorch 1.1, the scheduler must be stepped after the optimiser
        if scheduler is not None:
            scheduler.step()
        # todo tensorboard logging or similar

    result = {
        "coords": model.get_coords().to("cpu"),
    }

    # Analyse
    analysis = model.analyse()
    logger.debug(f"Found minimum: {analysis}.")
    result.update(analysis)
    return result
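# A minimal usage sketch of find_minimum, mirroring test_long_run below. The
# Eggcarton toy surface and the SGD hyperparameters are illustrative choices,
# not the only supported ones:
#
#     model = ModelWrapper(Eggcarton(2))
#     minimum = find_minimum(model, OptimConfig(1000, SGD, {"lr": 0.1}, None, None, None))
#     minimum["coords"]  # flat parameter vector of the located minimum, on CPU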
def test_auto_neb(self):
    # Test AutoNEB procedure
    graph = MultiGraph()
    for idx, minimum in enumerate(self.minima):
        graph.add_node(idx + 1, **minimum)

    # Set up AutoNEB schedule
    spring_constant = float("inf")
    eval_config = EvalConfig(128)
    optim_config_1 = OptimConfig(10, SGD, {"lr": 0.1}, None, None, eval_config)
    optim_config_2 = OptimConfig(10, SGD, {"lr": 0.01}, None, None, eval_config)
    weight_decay = 0
    subsample_pivot_count = 1
    neb_configs = [
        NEBConfig(spring_constant, weight_decay, equal, {"count": 2}, subsample_pivot_count, optim_config_1),
        NEBConfig(spring_constant, weight_decay, highest, {"count": 3, "key": "dense_train_loss"}, subsample_pivot_count, optim_config_1),
        NEBConfig(spring_constant, weight_decay, highest, {"count": 3, "key": "dense_train_loss"}, subsample_pivot_count, optim_config_2),
        NEBConfig(spring_constant, weight_decay, highest, {"count": 3, "key": "dense_train_loss"}, subsample_pivot_count, optim_config_2),
    ]
    auto_neb_config = AutoNEBConfig(neb_configs)
    self.assertEqual(auto_neb_config.cycle_count, len(neb_configs))

    # Run AutoNEB
    auto_neb(1, 2, graph, self.model, auto_neb_config)
    self.assertEqual(len(graph.edges), auto_neb_config.cycle_count)
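# Hedged sketch: after auto_neb returns, the networkx MultiGraph holds one
# edge per NEB cycle between the two minima. The networkx calls below are
# standard; the layout of each edge's payload is whatever auto_neb stored
# (an assumption here, not verified against its implementation):
#
#     for key, data in graph.get_edge_data(1, 2).items():
#         print(key, sorted(data))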
def test_long_run(self):
    eggcarton = Eggcarton(2)
    model = ModelWrapper(eggcarton)
    minima = [find_minimum(model, OptimConfig(1000, SGD, {"lr": 0.1}, None, None, None)) for _ in range(2)]

    neb_optim_config = OptimConfig(1000, SGD, {"lr": 0.1}, None, None, None)
    neb_config = NEBConfig(float("inf"), 1e-5, equal, {"count": 20}, 1, neb_optim_config)
    neb({
        "path_coords": torch.cat([m["coords"].view(1, -1) for m in minima]),
        "target_distances": torch.ones(1)
    }, model, neb_config)
def test_neb(self):
    minima = self.minima[:2]
    neb_eval_config = EvalConfig(128)
    neb_optim_config = OptimConfig(10, Adam, {}, None, None, neb_eval_config)
    neb_config = NEBConfig(float("inf"), 1e-5, equal, {"count": 3}, 1, neb_optim_config)
    result = neb({
        "path_coords": torch.cat([m["coords"].view(1, -1) for m in minima]),
        "target_distances": torch.ones(1)
    }, self.model, neb_config)

    required_keys = [
        "path_coords",
        "target_distances",
        "saddle_train_error",
        "saddle_train_loss",
        "saddle_test_error",
        "saddle_test_loss",
        "dense_train_error",
        "dense_train_loss",
        "dense_test_error",
        "dense_test_loss",
    ]
    for key in required_keys:
        self.assertIn(key, result, f"{key} not in result")
        value = result[key]
        self.assertFalse(torch.isnan(value).any().item(), f"{key} contains a NaN value")
        if "saddle_" in key:
            print(key, value.item())
@classmethod
def setUpClass(cls):
    super(TestAlgorithms, cls).setUpClass()
    cls.model = _create_xor_model()
    cls.model.to(cls.device)

    min_eval_config = EvalConfig(128)
    min_optim_config = OptimConfig(100, Adam, {}, None, None, min_eval_config)
    cls.minima = [find_minimum(cls.model, min_optim_config) for _ in range(2)]
def read_config_file(config_file: str):
    with open(config_file, "r") as file:
        config = safe_load(file)

    architecture, arguments = replace_instanciation(config["architecture"], models)
    if "dataset" in config:
        datasets, input_size, output_size = load_dataset(config["dataset"])
        arguments["input_size"], arguments["output_size"] = input_size, output_size
    else:
        datasets = None
    model = architecture(**arguments)
    if datasets is not None:
        model = DataModel(CompareModel(model, NLLLoss()), datasets)
    model = ModelWrapper(model)
    model.to(config["device"])

    minima_count = int(config["minima_count"])
    min_config = OptimConfig.from_dict(config["minimum"])
    lex_config = LandscapeExplorationConfig.from_dict(config["exploration"])

    return model, minima_count, min_config, lex_config
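# Sketch of a configuration file that read_config_file can parse. Only the
# top-level keys are taken from the code above; the bodies of "minimum" and
# "exploration" are defined by OptimConfig.from_dict and
# LandscapeExplorationConfig.from_dict, so they are left as placeholders:
#
#     architecture: <name resolved against the models module>
#     dataset: <optional; sets input_size/output_size and wraps the model in a DataModel>
#     device: cuda
#     minima_count: 10
#     minimum:
#       # ... parsed by OptimConfig.from_dict
#     exploration:
#       # ... parsed by LandscapeExplorationConfig.from_dict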
def test_dataset_generation(self):
    transform = Compose([Pad(2), ToTensor()])
    train_mnist = MNIST(join(dirname(__file__), "tmp/mnist"), True, transform, download=True)
    input_size = train_mnist[0][0].shape
    number_of_classes = 10
    resnet = ResNet(20, input_size, number_of_classes)

    # Find a minimiser for the network
    optim_wrapper = ModelWrapper(DataModel(CompareModel(resnet, NLLLoss()), {"train": train_mnist}))
    optim_wrapper.to("cuda")
    optim_config = OptimConfig(100, SGD, {"lr": 0.1}, None, None, EvalConfig(128))
    minimum = find_minimum(optim_wrapper, optim_config)
    optim_wrapper.set_coords_no_grad(minimum["coords"])

    nim = NetworkInputModel(resnet, input_size, 0)
    nim.cuda()
    resnet.cuda()
    dataset = nim.generate_dataset(train_mnist, number_of_classes)
    self.assertEqual(len(dataset), 100)