# Example #1
def test_train():
    """Train a tiny SMH end-to-end and check the learned weights match
    previously recorded reference values."""
    numpy.random.seed(1)

    # Build train/validation/test splits and pair each matrix with its
    # per-row sums (column vector) as the target.
    splits = [synthetic_data(n) for n in (ROWS - 30, 30, 10)]
    splits = [(m, m.sum(axis=1)[:, numpy.newaxis]) for m in splits]
    training_data, validation_data, testing_data = splits
    x, x_sums = training_data

    smh = SMH(
            numpy_rng = numpy.random.RandomState(123),
            mean_doc_size = x.sum(axis=1).mean(), 
            first_layer_type = 'bernoulli', 
            n_ins = x.shape[1],
            mid_layer_sizes = [2],
            inner_code_length = 1,    
    )
    smh.train(training_data, validation_data, testing_data, 
                finetune_lr = 0.3, pretraining_epochs = 100, pretrain_lr = 0.01, training_epochs = 100, 
                method = 'cd', k = 1, noise_std_dev = 0, cost_method = 'squared_diff', 
                batch_size = 2, skip_trace_images=True, weights_file=None)

    layers = smh.export_model()

    # Every exported layer's weights and biases must match the recorded ones.
    for (W, b), want_W, want_b in zip(layers, previously_recorded_W, previously_recorded_b):
        assert numpy.allclose(W.get_value(), want_W)
        assert numpy.allclose(b.get_value(), want_b)
# Example #2
def load_model(cost_method, n_ins=784, mid_layer_sizes=None,
               inner_code_length=10, weights_file='data/last_smh_model_params.pkl.gz'):
    """Construct an SMH with the given architecture and load saved weights.

    Args:
        cost_method: cost method passed to ``SMH.unroll_layers``.
        n_ins: number of input units.
        mid_layer_sizes: sizes of the middle layers; defaults to ``[200]``.
        inner_code_length: length of the inner code layer.
        weights_file: path to the pickled model parameters.

    Returns:
        The SMH instance with the saved parameters loaded.
    """
    # Avoid the mutable-default-argument pitfall: [200] was previously
    # shared across calls; create a fresh list per call instead.
    if mid_layer_sizes is None:
        mid_layer_sizes = [200]

    numpy_rng = numpy.random.RandomState(212)
    smh = SMH(numpy_rng=numpy_rng, mid_layer_sizes=mid_layer_sizes,
              inner_code_length=inner_code_length, n_ins=n_ins)
    # Need to unroll before loading params so that we have the right
    # number of layers.
    smh.unroll_layers(cost_method, 0)
    # Context manager guarantees the handle is closed even if unpickling
    # raises (the original leaked the file on error).
    # NOTE(review): despite the .pkl.gz name, the file is read as a plain
    # pickle — confirm it is not actually gzip-compressed.
    with open(weights_file, 'rb') as save_file:
        smh_params = cPickle.load(save_file)
    smh.load_model(smh_params)
    return smh