Example #1
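A one-layer RBM stack is pretrained with contrastive divergence, and the resulting DBN is then fine-tuned on labeled data. The listing assumes that the six shared variables (train_data_sh, train_data_t_sh, and their validation/test counterparts) already hold the data splits. A minimal sketch of how such shared datasets are usually built in Theano; the helper name shared_dataset and the raw arrays train_x, train_y are placeholders, not part of the original code:

import numpy
import theano
import theano.tensor as T

def shared_dataset(data_x, data_y):
    # Keep the whole split in Theano shared variables so minibatches
    # can be sliced on the device instead of being copied from host
    # memory on every update.
    shared_x = theano.shared(numpy.asarray(data_x, dtype=theano.config.floatX),
                             borrow=True)
    shared_y = theano.shared(numpy.asarray(data_y, dtype=theano.config.floatX),
                             borrow=True)
    # Targets are stored as floatX for GPU compatibility but consumed
    # as integer class labels downstream.
    return shared_x, T.cast(shared_y, 'int32')

# train_x, train_y: numpy arrays loaded elsewhere (placeholder names)
train_data_sh, train_data_t_sh = shared_dataset(train_x, train_y)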
datasets = [(train_data_sh, train_data_t_sh),
            (valid_data_sh, valid_data_t_sh),
            (test_data_sh, test_data_t_sh)]
rbm_stack = RBMStack(num_dims, [500])  # a single hidden layer of 500 units
dbn = DBN(rbm_stack, 2)

batch_size = 100
max_epoch = 10
train_params = {
    "batch_size": batch_size,
    "learning_rate": 0.01,
    "cd_steps": 2,       # Gibbs steps per contrastive-divergence update
    "max_epoch": max_epoch,
    "persistent": True,  # persistent CD: reuse the negative chain between updates
    "finetune_learning_rate": 0.1,
}

pre_fn = dbn.pretrain_fun(train_data_sh, train_params)

# Number of complete minibatches; any remainder is dropped.
num_batches = train_data_sh.get_value(borrow=True).shape[0] / batch_size

# pretrain_fun yields one compiled training function per RBM in the
# stack; the layers are trained greedily, one after another.
for f in pre_fn:
    for ep in xrange(0, max_epoch):
        for b in xrange(0, num_batches):
            cost, cur_free_en, cur_gparam = f(b)
            print "Epoch # %d:%d cost: %f free energy: %f grad: %f" % (
                ep, b, cost, cur_free_en, cur_gparam)

train, valid, test = dbn.finetune_fun(datasets, train_params)
validation_frequency = 200  # minibatches between validation checks
for ep in xrange(0, 30):
    for b in xrange(0, num_batches):
        cost = train(b)
        print "Epoch # %d:%d cost: %f" % (ep, b, cost)
        iter = ep * num_batches + b  # global minibatch counter
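        # The listing breaks off here; given validation_frequency above,
        # a periodic validation pass presumably follows. A sketch, assuming
        # valid() evaluates the whole validation split in one call (if it
        # takes a minibatch index instead, average it over the batches):
        if (iter + 1) % validation_frequency == 0:
            valid_cost = valid()
            print "Epoch # %d:%d validation cost: %f" % (ep, b, valid_cost)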