Example 1
def _create_xor_model():
    # Build the network, pair it with a loss, attach the XOR train/test
    # datasets, and wrap everything into a single ModelWrapper.
    mlp = MLP(2, 10, 2, 2, False)
    loss_model = CompareModel(mlp, NLLLoss())
    data_model = DataModel(loss_model, {
        "train": XORDataset(train=True),
        "test": XORDataset(train=False)
    })
    model = ModelWrapper(data_model)
    return model
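A minimal usage sketch for this helper (not from the source), assuming the wrapper exposes the same adapt_to_config/analyse methods used in Example 3:

model = _create_xor_model()
model.adapt_to_config(EvalConfig(1024))  # evaluation configuration, as in Example 3
analysis = model.analyse()               # dict of metrics, inspected by key in Example 3
print(analysis)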
Example 2
def read_config_file(config_file: str):
    # Parse the YAML configuration file.
    with open(config_file, "r") as file:
        config = safe_load(file)

    # Resolve the architecture entry into a model class and its constructor arguments.
    architecture, arguments = replace_instanciation(config["architecture"], models)
    if "dataset" in config:
        datasets, input_size, output_size = load_dataset(config["dataset"])
        arguments["input_size"], arguments["output_size"] = input_size, output_size
    else:
        datasets = None
    model = architecture(**arguments)
    if datasets is not None:
        model = DataModel(CompareModel(model, NLLLoss()), datasets)
    model = ModelWrapper(model)
    model.to(config["device"])

    # Build the optimisation and landscape-exploration configurations.
    minima_count = int(config["minima_count"])
    min_config = OptimConfig.from_dict(config["minimum"])
    lex_config = LandscapeExplorationConfig.from_dict(config["exploration"])

    return model, minima_count, min_config, lex_config
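For orientation, here is a hypothetical sketch (not from the source) of the top-level keys this loader reads, written as the Python dict that safe_load would return; the contents of "minimum" and "exploration", and the shape of "dataset", depend on OptimConfig.from_dict, LandscapeExplorationConfig.from_dict and load_dataset, which are not shown here:

example_config = {
    "architecture": "...",   # resolved via replace_instanciation(..., models)
    "dataset": "...",        # optional: when present, load_dataset supplies the datasets
                             # plus input_size / output_size
    "device": "cpu",         # forwarded to model.to(...)
    "minima_count": 3,       # read with int(...)
    "minimum": {},           # forwarded to OptimConfig.from_dict
    "exploration": {},       # forwarded to LandscapeExplorationConfig.from_dict
}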
Example 3
    def _test_model(self, nn_model):
        # Wrap the network with its loss and the MNIST train/test splits.
        loss_model = CompareModel(nn_model, NLLLoss())
        data_model = DataModel(loss_model, {
            "train": self.train_mnist,
            "test": self.test_mnist
        })
        model = ModelWrapper(data_model)
        model.adapt_to_config(EvalConfig(1024))

        if cuda.is_available():
            pass  # model.to("cuda")
        analysis = model.analyse()

        # Print the size of each parameter/buffer block exposed by the wrapper.
        for offset, data, size, is_buffer in model.iterate_params_buffers():
            print(size)

        # Every error metric should stay within ±10% of the expected random error.
        for key, value in analysis.items():
            if "error" in key:
                self.assertLess(self.random_error * 0.9, value, f"Random {key} too low")
                self.assertGreater(self.random_error * 1.1, value, f"Random {key} too high")
        print(nn_model.__class__.__name__, analysis)
Example 4
    def test_dataset_generation(self):
        # Load MNIST, padded from 28x28 to 32x32.
        transform = Compose([Pad(2), ToTensor()])
        train_mnist = MNIST(join(dirname(__file__), "tmp/mnist"),
                            True,
                            transform,
                            download=True)
        input_size = train_mnist[0][0].shape
        number_of_classes = 10
        resnet = ResNet(20, input_size, number_of_classes)

        # Find a minimiser for the network
        optim_wrapper = ModelWrapper(
            DataModel(CompareModel(resnet, NLLLoss()), {"train": train_mnist}))
        optim_wrapper.to("cuda")
        optim_config = OptimConfig(100, SGD, {"lr": 0.1}, None, None,
                                   EvalConfig(128))
        minimum = find_minimum(optim_wrapper, optim_config)
        optim_wrapper.set_coords_no_grad(minimum["coords"])

        # Generate a dataset through the NetworkInputModel and check its size.
        nim = NetworkInputModel(resnet, input_size, 0)
        nim.cuda()
        resnet.cuda()
        dataset = nim.generate_dataset(train_mnist, number_of_classes)
        self.assertEqual(len(dataset), 100)