Example 1
import numpy as np

import convergence_utils
import models
import optimization_wrapper
def generate_data_SFO_L(num_passes=51, base_fname='num_history_terms', store_x=True):
    """
    Same as generate_data(), but compares SFO runs with different numbers of
    history terms (L) rather than SFO to other optimizers.
    """
    #models_to_train = ( models.logistic, models.Hopfield )
    #models_to_train = ( models.Hopfield, ) # DEBUG
    #models_to_train = ( models.logistic, ) # DEBUG
    models_to_train = ( models.MLP_soft, ) # DEBUG

    # the different numbers of history terms to experiment with
    L_set = [5,10,20]

    for model_class in models_to_train:
        # # first do LBFGS
        # np.random.seed(0) # make experiments repeatable
        # model = model_class(scale_by_N=False)
        # trainer = optimization_wrapper.train(model, full_objective_per_pass=1)
        # optimizer = trainer.LBFGS
        # print("\n\n\n" + model.name + "\n" + str(optimizer))
        # optimizer(num_passes=num_passes)
        # save_results(trainer, base_fname=base_fname, store_x=store_x)

        # then do SFO with different numbers of history terms
        for L in L_set:
            np.random.seed(0) # make experiments repeatable
            model = model_class()
            trainer = optimization_wrapper.train(model)
            optimizer = trainer.SFO
            np.random.seed(0) # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes, max_history_terms=L, learner_name='SFO $L=%d$'%(L))
            convergence_utils.save_results(trainer, base_fname=(base_fname+'_L=%d'%(L)), store_x=store_x)
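
# A minimal sketch of driving the sweep above; _demo_sweep_L is a hypothetical
# helper, and the .npz pattern assumes convergence_utils.save_results prefixes
# its output files with the base_fname it is given (an assumption, not
# something this file guarantees):
def _demo_sweep_L():
    import glob
    generate_data_SFO_L(num_passes=3)  # short run, just to produce files
    # expect one results file per history length L in L_set
    for fname in sorted(glob.glob('num_history_terms_L=*.npz')):
        print(fname)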
def generate_data_SFO_N(num_passes=51, base_fname='num_minibatches', store_x=True):
    """
    Same as generate_data(), but compares SFO runs with different numbers of
    minibatches rather than SFO to other optimizers.
    """
    models_to_train = ( models.logistic, models.Hopfield )
    models_to_train = ( models.logistic, ) # DEBUG

    # the different numbers of minibatches to experiment with
    N_set = np.round(np.logspace(0, np.log10(200), 6)).astype(int)
    #N_set = np.round(np.logspace(0, 2, 3)).astype(int)
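    # with the settings above, N_set works out to [1, 3, 8, 24, 69, 200]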

    for model_class in models_to_train:
        # # first do LBFGS
        # np.random.seed(0) # make experiments repeatable
        # model = model_class(scale_by_N=False)
        # trainer = optimization_wrapper.train(model, full_objective_per_pass=1)
        # optimizer = trainer.LBFGS
        # print("\n\n\n" + model.name + "\n" + str(optimizer))
        # optimizer(num_passes=num_passes)
        # save_results(trainer, base_fname=base_fname, store_x=store_x)

        # then do SFO with different numbers of minibatches
        for N in N_set:
            np.random.seed(0) # make experiments repeatable
            model = model_class(num_subfunctions=N, scale_by_N=False)
            trainer = optimization_wrapper.train(model, full_objective_per_pass=1)
            optimizer = trainer.SFO
            np.random.seed(0) # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes, learner_name='SFO $N=%d$'%(N))
            convergence_utils.save_results(trainer, base_fname=(base_fname+'_N=%d'%(N)), store_x=store_x)
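
# Sketch: gathering the per-N result files for a combined plot. The comments
# in generate_data() note that files saved for the same model can be combined
# "just by calling load_results with all the saved files"; the exact signature
# of convergence_utils.load_results is an assumption, so the call is left
# commented out:
def _demo_collect_N(base_fname='num_minibatches'):
    import glob
    fnames = sorted(glob.glob(base_fname + '_N=*.npz'))
    # hypothetical: histories = convergence_utils.load_results(fnames)
    return fnames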
def generate_data_SFO_variations(num_passes=51,
                                 base_fname='convergence_variations',
                                 store_x=True):
    """
    Same as generate_data(), but compares different variations of SFO to each
    other, rather than SFO to other optimizers.
    """
    models_to_train = (models.logistic, models.Hopfield)
    models_to_train = (models.logistic, )  #DEBUG

    for model_class in models_to_train:
        np.random.seed(0)  # make experiments repeatable
        model = model_class()
        trainer = optimization_wrapper.train(model)
        optimizers_to_use = [
            trainer.SFO_variations,
        ]
        for optimizer in optimizers_to_use:
            np.random.seed(0)  # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes)
            # save_results doesn't need to be called until after this loop,
            # but calling it here lets us peek at partial results.
            # Also, note that saved files for the same model but different
            # optimizers can be combined in plots, just by calling
            # load_results with all the saved files.
            convergence_utils.save_results(trainer,
                                           base_fname=base_fname,
                                           store_x=store_x)
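
# Because save_results is called once per optimizer with the same base_fname,
# adding more entries to optimizers_to_use makes their traces land in files
# the plotting code can combine. A sketch, assuming trainer.SFO exists as it
# does in generate_data():
#
#     optimizers_to_use = [trainer.SFO_variations, trainer.SFO]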
def generate_data(num_passes=51, base_fname='figure_data_', store_x=True):
    """
    train all the models in models_to_train using all the
    optimizers, and save the resulting function value traces
    in figure_data_*.npz files.
    """
    models_to_train = (
        #models.DeepAE,
        models.CIFARConvNet,
        models.ICA,
        models.toy,
        models.logistic,
        models.MLP_soft,
        # models.MLP_hard,
        models.ContractiveAutoencoder,
        models.Hopfield,
    )
    models_to_train = (models.logistic, )  # DEBUG

    for model_class in models_to_train:
        np.random.seed(0)  # make experiments repeatable
        model = model_class()
        trainer = optimization_wrapper.train(model)
        optimizers_to_use = (
            trainer.SFO,
            trainer.LBFGS,
            trainer.LBFGS_minibatch,
            trainer.ADA,
            trainer.SGD,
            trainer.SAG,
            trainer.SGD_momentum,
        )
        for optimizer in optimizers_to_use:
            np.random.seed(0)  # make experiments exactly repeatable
            print("\n\n\n" + model.name + "\n" + str(optimizer))
            optimizer(num_passes=num_passes)
            # save_results doesn't need to be called until after this loop,
            # but calling it here lets us peek at partial results.
            # Also, note that saved files for the same model but different
            # optimizers can be combined in plots, just by calling
            # load_results with all the saved files.
            convergence_utils.save_results(trainer,
                                           base_fname=base_fname,
                                           store_x=store_x)
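
# Sketch of the overall pipeline this file implies: run every optimizer on the
# DEBUG model, then hand all the saved traces to the plotting side. num_passes=3
# keeps the demo short; the load_results call is hypothetical in its exact
# signature (the comments above only say it takes the saved files):
if __name__ == '__main__':
    generate_data(num_passes=3)
    # import glob
    # results = convergence_utils.load_results(sorted(glob.glob('figure_data_*.npz')))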