Example #1
0
def find_minimum(model: models.ModelWrapper,
                 optim_config: config.OptimConfig) -> dict:
    """Randomly initialise `model` and minimise it for `optim_config.nsteps` steps.

    Args:
        model: Wrapped model providing parameters, gradient application and analysis.
        optim_config: Describes the optimiser class/arguments, an optional LR
            scheduler, the number of steps and an optional evaluation config.

    Returns:
        A dict containing the final coordinates under ``"coords"`` (moved to CPU)
        merged with the metrics returned by ``model.analyse()``.
    """
    optimiser = optim_config.algorithm_type(
        model.parameters(),
        **optim_config.algorithm_args)  # type: optim.Optimizer

    # Scheduler (optional)
    if optim_config.scheduler_type is not None:
        scheduler = optim_config.scheduler_type(optimiser,
                                                **optim_config.scheduler_args)
    else:
        scheduler = None

    # Initialise
    model.initialise_randomly()
    if optim_config.eval_config is not None:
        model.adapt_to_config(optim_config.eval_config)

    # Optimise
    for _ in helper.pbar(range(optim_config.nsteps), "Find minimum"):
        model.apply(gradient=True)
        optimiser.step()
        # Since PyTorch 1.1 the LR scheduler must be stepped *after* the
        # optimiser; the previous order skipped the first scheduled LR value.
        if scheduler is not None:
            scheduler.step()
        # todo tensorboard logging or similar
    result = {
        "coords": model.get_coords().to("cpu"),
    }

    # Analyse
    analysis = model.analyse()
    logger.debug(f"Found minimum: {analysis}.")
    result.update(analysis)
    return result
Example #2
0
def _create_xor_model():
    """Assemble a ModelWrapper around a small MLP with NLL loss on the XOR task."""
    datasets = {
        "train": XORDataset(train=True),
        "test": XORDataset(train=False),
    }
    network = MLP(2, 10, 2, 2, False)
    wrapped = DataModel(CompareModel(network, NLLLoss()), datasets)
    return ModelWrapper(wrapped)
Example #3
0
    def test_long_run(self):
        """Find two minima on a 2D egg-carton surface and connect them with NEB."""
        model = ModelWrapper(Eggcarton(2))

        # Locate two minima independently with plain SGD.
        minima = [
            find_minimum(model, OptimConfig(1000, SGD, {"lr": 0.1}, None, None, None))
            for _ in range(2)
        ]

        # Build an initial straight band between the two minima and relax it.
        neb_optim_config = OptimConfig(1000, SGD, {"lr": 0.1}, None, None, None)
        neb_config = NEBConfig(float("inf"), 1e-5, equal, {"count": 20}, 1, neb_optim_config)
        initial_band = {
            "path_coords": torch.cat([m["coords"].view(1, -1) for m in minima]),
            "target_distances": torch.ones(1),
        }
        neb(initial_band, model, neb_config)
Example #4
0
def read_config_file(config_file: str):
    """Load a YAML experiment configuration and build the described model.

    Returns a tuple of (wrapped model, number of minima to find,
    minimisation config, landscape-exploration config).
    """
    with open(config_file, "r") as file:
        config = safe_load(file)

    # Resolve the architecture class and its constructor arguments.
    architecture, arguments = replace_instanciation(config["architecture"], models)

    # An optional dataset fixes the network's input/output sizes.
    datasets = None
    if "dataset" in config:
        datasets, input_size, output_size = load_dataset(config["dataset"])
        arguments["input_size"] = input_size
        arguments["output_size"] = output_size

    model = architecture(**arguments)
    if datasets is not None:
        model = DataModel(CompareModel(model, NLLLoss()), datasets)
    model = ModelWrapper(model)
    model.to(config["device"])

    return (model,
            int(config["minima_count"]),
            OptimConfig.from_dict(config["minimum"]),
            LandscapeExplorationConfig.from_dict(config["exploration"]))
Example #5
0
    def _test_model(self, nn_model):
        """Wrap `nn_model` for MNIST and check an untrained net scores at chance level."""
        wrapper = ModelWrapper(DataModel(
            CompareModel(nn_model, NLLLoss()),
            {
                "train": self.train_mnist,
                "test": self.test_mnist,
            }))
        wrapper.adapt_to_config(EvalConfig(1024))

        if cuda.is_available():
            pass  # model.to("cuda")
        analysis = wrapper.analyse()

        for offset, data, size, is_buffer in wrapper.iterate_params_buffers():
            print(size)

        # Every error metric must lie within 10% of the expected random baseline.
        for key, value in analysis.items():
            if "error" in key:
                self.assertLess(self.random_error * 0.9, value, f"Random {key} too low")
                self.assertGreater(self.random_error * 1.1, value, f"Random {key} too high")
        print(nn_model.__class__.__name__, analysis)
Example #6
0
    def test_dataset_generation(self):
        """Optimise a ResNet on MNIST, then generate an input dataset from it."""
        transform = Compose([Pad(2), ToTensor()])
        train_mnist = MNIST(join(dirname(__file__), "tmp/mnist"),
                            True, transform, download=True)
        input_size = train_mnist[0][0].shape
        number_of_classes = 10
        resnet = ResNet(20, input_size, number_of_classes)

        # Find a minimiser for the network
        wrapper = ModelWrapper(
            DataModel(CompareModel(resnet, NLLLoss()), {"train": train_mnist}))
        wrapper.to("cuda")
        minimum = find_minimum(
            wrapper,
            OptimConfig(100, SGD, {"lr": 0.1}, None, None, EvalConfig(128)))
        wrapper.set_coords_no_grad(minimum["coords"])

        # Generate a dataset from network inputs at the found minimum.
        nim = NetworkInputModel(resnet, input_size, 0)
        nim.cuda()
        resnet.cuda()
        dataset = nim.generate_dataset(train_mnist, number_of_classes)
        self.assertEqual(len(dataset), 100)