Example #1
0
def find_minimum(model: models.ModelWrapper,
                 optim_config: config.OptimConfig) -> dict:
    """Randomly initialise *model* and optimise it to a (local) minimum.

    Args:
        model: Wrapped model exposing ``parameters()``, random
            initialisation, gradient application and analysis helpers.
        optim_config: Holds the optimiser/scheduler types and their
            arguments, the number of steps, and an optional evaluation
            configuration.

    Returns:
        dict containing the final parameter ``"coords"`` (moved to CPU)
        merged with the key/value pairs of ``model.analyse()``.
    """
    optimiser = optim_config.algorithm_type(
        model.parameters(),
        **optim_config.algorithm_args)  # type: optim.Optimizer

    # Scheduler (optional)
    scheduler = None
    if optim_config.scheduler_type is not None:
        scheduler = optim_config.scheduler_type(optimiser,
                                                **optim_config.scheduler_args)

    # Initialise
    model.initialise_randomly()
    if optim_config.eval_config is not None:
        model.adapt_to_config(optim_config.eval_config)

    # Optimise
    for _ in helper.pbar(range(optim_config.nsteps), "Find minimum"):
        # NOTE(review): assumes model.apply(gradient=True) recomputes and
        # assigns fresh gradients every step (including clearing stale
        # ones) — confirm; otherwise an optimiser.zero_grad() is missing.
        model.apply(gradient=True)
        optimiser.step()
        # PyTorch >= 1.1: scheduler.step() must run *after*
        # optimiser.step(), otherwise the first LR of the schedule is
        # skipped (the original order triggered this bug).
        if scheduler is not None:
            scheduler.step()
        # todo tensorboard logging or similar
    result = {
        "coords": model.get_coords().to("cpu"),
    }

    # Analyse
    analysis = model.analyse()
    logger.debug(f"Found minimum: {analysis}.")
    result.update(analysis)
    return result
Example #2
0
    def _test_model(self, nn_model):
        """Smoke-test *nn_model*: analyse it untrained and check that every
        reported error metric lies within +/-10% of the expected random
        error for this dataset."""
        wrapped = ModelWrapper(
            DataModel(
                CompareModel(nn_model, NLLLoss()),
                {"train": self.train_mnist, "test": self.test_mnist},
            )
        )
        wrapped.adapt_to_config(EvalConfig(1024))

        if cuda.is_available():
            pass  # model.to("cuda")
        analysis = wrapped.analyse()

        # Debug output: one size per parameter/buffer tensor.
        for _offset, _data, tensor_size, _is_buffer in wrapped.iterate_params_buffers():
            print(tensor_size)

        for name, measured in analysis.items():
            if "error" not in name:
                continue
            # An untrained model should score close to chance level.
            self.assertLess(self.random_error * 0.9, measured, f"Random {name} too low")
            self.assertGreater(self.random_error * 1.1, measured, f"Random {name} too high")
        print(nn_model.__class__.__name__, analysis)