Example no. 1
0
def main():
    """Sweep a range of training configurations on an MNIST subset,
    training one model per configuration (with optional random restarts)
    and saving each finished run to disk under a numbered index.
    """
    global outdir  # output directory for the settings pickle (set elsewhere in this module)

    set_backend("gnumpy")

    report_args = { 'verbose'   : True,
                    'interval'  : 3,       # how many epochs between progress reports (larger is faster)
                    'interval_rate' : 1.35,
                    'visualize' : True,
                    'log_mode'  : 'html_anim' }

    # Digits 5/6/8/9 only; split=[30,10,0] presumably selects
    # train/valid/test portions -- TODO confirm against load_mnist.
    data = load_mnist(digits=[5,6,8,9],
                      split=[30,10,0])

    settings_name = "basic2"

    ######################################################
    # Generate a list of training configurations to try,
    # and loop over them.
    #
    settings,prefix = make_train_settings(settings_name)
    settings = enumerate_settings(settings)
    index = 801          # first variant id in this sweep (ids are 1-based)
    index_end = 1000     # last variant id (inclusive slice end below)
    settings = settings[index-1:index_end]
    num_restarts = 1  # How many random restarts do we try with the same parameters

    open_logfile("gen_trainees-%s" % prefix,"%d total variants; %d restarts each" % (len(settings),num_restarts))
    settingsfile = '%s/%s-settings.pkl' % (outdir,prefix)
    quickdump(settingsfile,settings)
    for setting in settings:
        print ('\n\n-------------------- %s-%d --------------------' % (prefix,index))
        # Parenthesized print calls for consistency with the rest of this
        # file (the originals were Python-2-only print statements).
        print (setting['model'])
        print (setting['train'])

        for rep in range(num_restarts):
            # Create the model and scale the data if necessary
            model = make_model(setting['model'],data.Xshape,data.Yshape)
            data.rescale(model.ideal_domain(),model.ideal_range())

            # Set up a callback to collect snapshots of the training run.
            # Bind the loop-dependent values as lambda defaults so the
            # callback is immune to late-binding closure capture even if
            # it is invoked after this iteration's variables are rebound.
            snapshots = []
            report_args['callback'] = lambda event,status,_setting=setting,_snapshots=snapshots,_model=model: \
                report_callback(_setting,_snapshots,_model,event,status)

            # Train the model
            trainer = TrainingRun(model,data,report_args,**setting['train'])
            trainer.train()
            trainer = None # force gc on any Tk window objects

            # Save the training run to disk
            save_trainee(snapshots,setting,prefix,index)
            index += 1

    #####################################################

    close_logfile()
    raw_input()  # keep any visualization windows open until the user hits Enter
Example no. 2
0
def main():
    """Build, train, and optionally gradient-check a two-hidden-layer
    dropout network on the full 10-digit MNIST dataset."""

    set_backend("gnumpy")
    #set_gradcheck_mode(True)

    ######################################################
    # Pull in all ten MNIST digit classes, split 85/0/15.
    tic()
    mnist = load_mnist(digits=range(10),
                       split=[85,0,15],
                       #split=[30,0,0]   # for faster training when debugging
                       )
    print ("Data loaded in %.1fs" % toc())

    ######################################################
    # Describe an 800x800 logistic network with dropout and max-norm
    # constraints, sized to match the dataset's input/output shapes.
    net_cfg = NeuralNetCfg(L1=1e-7*0,init_scale=0.1**2)
    net_cfg.input(mnist.Xshape,dropout=0.2)
    net_cfg.hidden(800,"logistic",dropout=0.5,maxnorm=4.0)
    net_cfg.hidden(800,"logistic",dropout=0.5,maxnorm=4.0)
    net_cfg.output(mnist.Yshape,"softmax",maxnorm=4.0)

    net = NeuralNet(net_cfg)

    ######################################################
    # Bring the data into the network's preferred domain/range.
    mnist.rescale(net.ideal_domain(),net.ideal_range())

    ######################################################
    # Run the optimizer with periodic progress reports.
    progress_opts = { 'verbose'   : True,
                      'interval'  : 10,       # how many epochs between progress reports (larger is faster)
                      'visualize' : True}

    run = TrainingRun(net,mnist,progress_opts,
                      learn_rate=10,
                      learn_rate_decay=.998,
                      momentum=[(1,.5),(400,0.99)],
                      batchsize=128)

    tic()
    run.train(3000)
    print ("Training took %.1fs" % toc())

    #####################################################

    if get_gradcheck_mode():
        net.gradcheck(mnist.train)

    raw_input()
Example no. 3
0
def main(viz=False):
    """Train an 800x800 logistic dropout network on the full MNIST set.

    Args:
        viz: when True, enable live visualization in the training
             progress reports.
    """

    tic()
    data = load_mnist()
    print ("Data loaded in %.1fs" % toc())

    # Create a neural network with matching input/output dimensions
    #
    cfg = NeuralNetCfg(L1=1e-6,init_scale=0.05)
    cfg.input(data.Xshape)
    cfg.hidden(800,"logistic",dropout=0.5)
    cfg.hidden(800,"logistic",dropout=0.25)
    cfg.output(data.Yshape,"softmax")

    model = NeuralNet(cfg)

    # Rescale the data to match the network's domain/range
    #
    data.rescale(model.ideal_domain(),model.ideal_range())

    # Train the network
    #
    report_args = { 'verbose'   : True,
                    'interval'  : 5,       # how many epochs between progress reports (larger is faster)
                    'window_size' : "compact",
                    'visualize' : viz}

    trainer = TrainingRun(model,data,report_args,
                          learn_rate=2,
                          learn_rate_decay=.995,
                          momentum=[(0,.5),(400,0.9)],
                          batchsize=64)

    # NOTE(review): Python-2-only print statement; this file predates
    # Python 3 syntax, so it is left as-is here.
    print "Memory available after data loaded:", memory_info(gc=True)

    tic()
    trainer.train(100)  # train for several epochs
    print ("Training took %.1fs" % toc())