Example #1
import numpy as np

# dbn, train_data_sh, datasets, batch_size and max_epoch are assumed to be
# defined earlier in the example.
# data_sh = theano.shared(np.asarray(data, dtype=theano.config.floatX), borrow=True)
# data_target_sh = theano.shared(np.asarray(data_target, dtype=theano.config.floatX), borrow=True)

train_params = {
    'cd_steps': 2,
    'max_epoch': max_epoch,
    'persistent': True,
    'finetune_learning_rate': 0.1
}

pre_fn = dbn.pretrain_fun(train_data_sh, train_params)

num_batches = train_data_sh.get_value(borrow=True).shape[0] // batch_size

# Greedy layer-wise pretraining: one pretraining function per layer.
for f in pre_fn:
    for ep in xrange(0, max_epoch):
        for b in xrange(0, num_batches):
            cost, cur_free_en, cur_gparam = f(b)
            print "Epoch # %d:%d cost: %f free energy: %f grad: %f" % (
                ep, b, cost, cur_free_en, cur_gparam)

# Supervised fine-tuning: compile train / validation / test functions.
train, valid, test = dbn.finetune_fun(datasets, train_params)
validation_frequency = 200
for ep in xrange(0, 30):
    for b in xrange(0, num_batches):
        cost = train(b)
        print "Epoch # %d:%d cost: %f" % (ep, b, cost)
        iteration = ep * num_batches + b
        if (iteration + 1) % validation_frequency == 0:
            validation_losses = valid()
            this_validation_loss = np.mean(validation_losses)
            print('epoch %i, minibatch %i/%i, validation error %f %%' %
                  (ep, b + 1, num_batches, this_validation_loss * 100.))
#num_datasets = [ num_train_data, num_valid_data, num_test_data ]
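The returned test function is never called above. Assuming it mirrors valid() and returns per-batch losses (an assumption, not confirmed by the example), a final check after fine-tuning could look like this:

test_losses = test()
print "test error: %f %%" % (np.mean(test_losses) * 100.)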

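The commented-out lines at the top of the example hint at how the inputs are wrapped as Theano shared variables. A minimal, self-contained sketch of that setup follows; the array shapes and the random placeholder data are assumptions, not taken from the original.

import numpy as np
import theano

# Hypothetical data: 1000 samples with 784 features, integer class labels.
data = np.random.rand(1000, 784)
data_target = np.random.randint(0, 10, size=1000)

data_sh = theano.shared(
    np.asarray(data, dtype=theano.config.floatX), borrow=True)
data_target_sh = theano.shared(
    np.asarray(data_target, dtype=theano.config.floatX), borrow=True)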