from collections import defaultdict

import numpy as np

import figures_models
import figures_train

def generate_data_SFO_variations(num_passes=20, base_fname='figure_data_', store_x=True):
    """
    Same as generate_data(), but compares different variations of SFO to each
    other, rather than comparing SFO to other optimizers.
    """
    models_to_train = ( figures_models.logistic, figures_models.Hopfield )
    models_to_train = ( figures_models.logistic, ) # DEBUG: overrides the line above to train only the logistic model
    
    for model_class in models_to_train:
        np.random.seed(0) # make experiments repeatable
        model = model_class()
        trainer = figures_train.figures_train(model)
        optimizers_to_use = [trainer.SFO_variations,] # DEBUG
        for optimizer in optimizers_to_use:
            np.random.seed(0) # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes)

            if not store_x:
                # delete the saved final x value so we don't run out of memory
                trainer.history['x'] = defaultdict(list)

            # save_results doesn't need to be called until outside this loop,
            # but this way we can peek at partial results.
            # Also, note that saved files for the same model but different optimizers
            # can be combined in plots, just by calling load_results with all the saved files.
            save_results(trainer, base_fname=base_fname)
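

# save_results() and load_results() are assumed to be defined elsewhere in
# this module. As a rough sketch only (the attribute names below, such as
# trainer.model, are assumptions, not the actual implementation),
# save_results() could dump the recorded traces to the figure_data_*.npz
# files mentioned in the docstrings:
def save_results_sketch(trainer, base_fname='figure_data_', store_x=True):
    history = dict(trainer.history)
    if not store_x:
        history['x'] = {}  # drop parameter snapshots to keep the file small
    # savez stores the dict as a 0-d object array; reload with allow_pickle=True
    np.savez(base_fname + trainer.model.name + '.npz', history=history)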


def generate_data_SFO_N(num_passes=20, base_fname='num_minibatches', store_x=True):
    """
    Same as generate_data(), but compares SFO with different numbers of minibatches
    rather than comparing SFO to other optimizers.
    """
    models_to_train = ( figures_models.logistic, figures_models.Hopfield )
    models_to_train = ( figures_models.logistic, ) # DEBUG: overrides the line above to train only the logistic model

    # the different numbers of minibatches to experiment with
    N_set = np.round(np.logspace(0, np.log10(200), 6)).astype(int)
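    # i.e. N_set == [1, 3, 8, 24, 69, 200]: six logarithmically spaced
    # minibatch counts from 1 to 200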
    #N_set = np.round(np.logspace(0, 2, 3)).astype(int)

    for model_class in models_to_train:
        # # first do LBFGS
        # np.random.seed(0) # make experiments repeatable
        # model = model_class(scale_by_N=False)
        # trainer = figures_train.figures_train(model, full_objective_per_pass=1)
        # optimizer = trainer.LBFGS
        # print("\n\n\n" + model.name + "\n" + str(optimizer))
        # optimizer(num_passes=num_passes)
        # save_results(trainer, base_fname=base_fname, store_x=store_x)

        # then do SFO with different numbers of minibatches
        for N in N_set:
            np.random.seed(0) # seed before model construction, so initialization is repeatable
            model = model_class(num_subfunctions=N, scale_by_N=False)
            trainer = figures_train.figures_train(model, full_objective_per_pass=1)
            optimizer = trainer.SFO
            np.random.seed(0) # re-seed so the optimizer run itself sees an identical random stream
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes, learner_name='SFO $N=%d$'%(N))
            save_results(trainer, base_fname=(base_fname+'_N=%d'%(N)), store_x=store_x)
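

# A minimal sketch of how load_results() might combine several saved files
# into one set of traces for plotting, assuming the save format sketched
# above (hypothetical helper, not the actual implementation):
def load_results_sketch(fnames):
    histories = {}
    for fname in fnames:
        # allow_pickle is required because the history dict was pickled by savez
        data = np.load(fname, allow_pickle=True)
        histories[fname] = data['history'].item()
    return histories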


def generate_data(num_passes=20, base_fname='figure_data_', store_x=True):
    """
    Train all the models in models_to_train using all the optimizers, and
    save the resulting function-value traces in figure_data_*.npz files.
    """
    models_to_train = ( 
                        figures_models.toy,
                        figures_models.logistic,
                        figures_models.ContractiveAutoencoder,
                        figures_models.ICA,
                        figures_models.MLP_hard,
                        figures_models.MLP_soft,
                        figures_models.Hopfield,
                        figures_models.CIFARConvNet,
                        )
    models_to_train = ( figures_models.logistic, ) # DEBUG: overrides the tuple above to train only the logistic model
    
    for model_class in models_to_train:
        np.random.seed(0) # make experiments repeatable
        model = model_class()
        trainer = figures_train.figures_train(model)
        optimizers_to_use = (
                                trainer.SFO,
                                trainer.LBFGS,
                                trainer.LBFGS_minibatch,
                                trainer.ADA,
                                trainer.SGD,
                                trainer.SAG,
                                trainer.SGD_momentum,
                            )
        for optimizer in optimizers_to_use:
            np.random.seed(0) # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes)

            if not store_x:
                # delete the saved final x value so we don't run out of memory
                trainer.history['x'] = defaultdict(list)

            # save_results doesn't need to be called until outside this loop,
            # but this way we can peek at partial results.
            # Also, note that saved files for the same model but different optimizers
            # can be combined in plots, just by calling load_results with all the saved files.
            save_results(trainer, base_fname=base_fname)
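

# A minimal driver for regenerating all of the traces (hypothetical
# convenience block; the original module may be invoked differently):
if __name__ == '__main__':
    generate_data(num_passes=20)
    generate_data_SFO_variations(num_passes=20)
    generate_data_SFO_N(num_passes=20)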