Example #1
def train_kanade():
    print "Testing RBM"

    data_manager = store.StorageManager('Kanade/SimpleRBMTest')

    # Load Kanade face images
    datasets = loader.load_kanade(n=500,
                                  set_name='25_25',
                                  emotions=['happy', 'sadness'],
                                  pre={'scale2unit': True})
    train_x, train_y = datasets[0]

    sparsity_constraint = True
    # Initialise the RBM and training parameters
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.001,
                               sparsity_constraint=sparsity_constraint,
                               sparsity_target=0.01,
                               sparsity_cost=1,
                               sparsity_decay=0.9,
                               dropout=True,
                               epochs=100)

    n_visible = train_x.get_value().shape[1]
    n_hidden = 500

    config = rbm_config.RBMConfig(
        v_n=n_visible,
        v2_n=n_visible,
        h_n=n_hidden,
        v_unit=rbm_units.GaussianVisibleUnit,
        associative=False,
        cd_type=rbm_config.CLASSICAL,
        cd_steps=1,
        train_params=tr,
        progress_logger=rbm_logger.ProgressLogger(img_shape=(25, 25)))

    rbm = RBM(config)

    print "... initialised RBM"

    # Train RBM
    rbm.train(train_x)

    # Test RBM
    rbm.reconstruct(train_x, k=5, plot_n=10, plot_every=1)

    # Store Parameters
    data_manager.persist(rbm)
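
The trained model is persisted through the StorageManager, so it can be reloaded later without retraining. A minimal sketch of such a reload, assuming data_manager.retrieve(str(rbm)) behaves as in Example #7 below; reload_kanade_rbm and its config argument are hypothetical:

def reload_kanade_rbm(config):
    # Re-open the same storage location used by train_kanade above.
    data_manager = store.StorageManager('Kanade/SimpleRBMTest')

    # Rebuild an RBM with the same config so that str(rbm) matches the persisted key.
    rbm = RBM(config)
    loaded = data_manager.retrieve(str(rbm))
    if loaded:
        rbm = loaded

    # Reconstruct a fresh batch of faces with the restored weights.
    datasets = loader.load_kanade(n=100,
                                  set_name='25_25',
                                  emotions=['happy', 'sadness'],
                                  pre={'scale2unit': True})
    test_x, _ = datasets[0]
    rbm.reconstruct(test_x, k=5, plot_n=10, plot_every=1)
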
def assess_rbm(clf, noisy_data, noisy_label, noisy_levels, tr_x, postfix=''):
    """Repeatedly train an RBM on the clean set tr_x and, after each round,
    score its reconstructions of each noisy test set with the classifier clf."""
    f_score = open('report{}.txt'.format(postfix), 'a')
    f_metric = open('metric{}.txt'.format(postfix), 'a')
    # Initialise architecture (get_rbm_config is a project helper defined elsewhere)
    config = get_rbm_config(25, n_hidden=500, epochs=2)
    model = RBM(config)
    pred_table = {}
    for l in xrange(len(noisy_levels)):
        pred_table[l] = []
    for i in xrange(50):
        # Train architecture
        model.train(tr_x)

        j = 0
        for xs, ys in zip(noisy_data, noisy_label):
            recon_xs = model.reconstruct(xs, img_name='test_rbm')
            pred, metric = clf.get_score(recon_xs, ys, True)
            print pred
            print metric
            f_metric.write('{}25_25, Epoch:{}\n'.format(noisy_levels[j], i))
            f_metric.write(metric)
            pred_table[j].append(pred)
            j += 1
    for k in pred_table:
        f_score.write('{}:{}\n'.format(noisy_levels[k], pred_table[k]))
    f_score.close()
    f_metric.close()
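
assess_rbm expects a pre-trained classifier, parallel lists of noisy test sets and labels, the corresponding noise levels, and the clean training images. A hedged driver sketch follows; the noise levels and set names of the form '<level>25_25' are assumptions inferred from the report format above:

# Hypothetical driver for assess_rbm (noise levels and set names are assumptions).
noisy_levels = ['', '0.1', '0.2']
noisy_data, noisy_label = [], []
for level in noisy_levels:
    xs, ys = loader.load_kanade(n=500,
                                set_name='{}25_25'.format(level),
                                pre={'scale2unit': True})[0]
    noisy_data.append(xs)
    noisy_label.append(ys)

# Clean training images and a logistic classifier trained on them.
tr_x, tr_y = loader.load_kanade(n=500, set_name='25_25',
                                pre={'scale2unit': True})[0]
clf = SimpleClassifier(classifier='logistic', train_x=tr_x, train_y=tr_y)

assess_rbm(clf, noisy_data, noisy_label, noisy_levels, tr_x, postfix='_gaussian')
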
Example #5
def test_rbm():
    print "Testing RBM"

    data_manager = store.StorageManager('TestRBM')
    # Load the Cohn-Kanade dataset
    datasets = loader.load_kanade(pre={'scale': True}, n=100, set_name='sharp_equi25_25')
    train_set_x, train_set_y = datasets[0]
    test_set_x, test_set_y = datasets[2]

    # Initialise the RBM and training parameters
    tr = TrainParam(learning_rate=0.0001,
                    momentum_type=NESTEROV,
                    momentum=0.5,
                    weight_decay=0.0001,
                    sparsity_constraint=True,
                    sparsity_target=0.01,
                    sparsity_cost=0.1,
                    sparsity_decay=0.9,
                    dropout=True,
                    dropout_rate=0.5,
                    batch_size=10,
                    epochs=10)

    n_visible = train_set_x.get_value(borrow=True).shape[1]
    n_hidden = 10

    config = RBMConfig()
    config.v_n = n_visible
    config.h_n = n_hidden
    config.v_unit = rbm_units.GaussianVisibleUnit
    # config.h_unit = rbm_units.ReLUnit
    config.progress_logger = ProgressLogger(img_shape=(25, 25))
    config.train_params = tr
    rbm = RBM(config)
    print "... initialised RBM"

    load = store.retrieve_object(str(rbm))
    if load:
        rbm = load

    for i in xrange(0, 1):
        # Train RBM
        rbm.train(train_set_x)
        data_manager.persist(rbm)

        # Test RBM Reconstruction via Linear Classifier
        clf = SimpleClassifier(classifier='logistic', train_x=train_set_x, train_y=train_set_y)
        recon_te = rbm.reconstruct(test_set_x, k=1, plot_n=100, plot_every=1, img_name='recon_te_{}.png'.format(i))

        print 'Original Score: {}'.format(clf.get_score(test_set_x, test_set_y))
        print 'Recon Score:    {}'.format(clf.get_score(recon_te, test_set_y.eval()))
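
The snippet above uses TrainParam, NESTEROV, RBMConfig and ProgressLogger without a module prefix; presumably they come from the same rbm_config and rbm_logger modules referenced in Example #1. A sketch of the assumed imports (the exact package paths are an assumption):

# Assumed imports, mirroring the qualified names used in Example #1.
from rbm_config import TrainParam, RBMConfig, NESTEROV
from rbm_logger import ProgressLogger
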
# Model set-up: Gaussian-binary RBM trained with one-step block Gibbs sampling and plain SGD
gaussian = GaussianBinary(SIZE_VISIBLE, SIZE_HIDDEN)
gibbs = BlockGibbsSampler(gaussian, sampling_steps=1)
sgd = SGD(gaussian, learning_rate=0.001, weight_decay=0, momentum=0)
rbm = RBM(gaussian, gibbs, sgd)

# pyplot.figure(1)
# pyplot.ion()
# pyplot.show()
# vmin = numpy.min(dataset)
# vmax = numpy.max(dataset)

for epoch in range(EPOCHS):
    for b_idx in idx:
        batch = dataset[b_idx[0]:b_idx[1], :]
        d_weight_update, _, _ = rbm.train_batch(batch)
        rec_probs, rec_state = rbm.reconstruct(batch, steps=10)

        pyplot.clf()
        img = numpy.reshape(rec_state[-1, :], newshape=(28, 28))

        print "Max: " + str(numpy.max(img)) + " Min: " + str(numpy.min(img))

        # pyplot.hist(d_weight_update)
        # pyplot.draw()
        # pyplot.matshow(img, fignum=0, cmap=pyplot.cm.gray, vmin=vmin , vmax=vmax)
        # pyplot.draw()
        # time.sleep(0.1)

raw_input()
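
The loop above uses SIZE_VISIBLE, SIZE_HIDDEN, EPOCHS, dataset and idx, which are defined earlier in the original script. A minimal setup sketch, assuming 28x28 inputs (implied by the reshape to (28, 28) above); the hidden size, epoch count, batch size and random stand-in data are placeholders:

import numpy

# Assumed hyper-parameters; 28x28 images give 784 visible units.
SIZE_VISIBLE = 28 * 28
SIZE_HIDDEN = 100
EPOCHS = 10
BATCH_SIZE = 20

# dataset: one flattened image per row, shape (n_samples, SIZE_VISIBLE).
# Random values stand in for the real images loaded by the original script.
dataset = numpy.random.rand(1000, SIZE_VISIBLE).astype(numpy.float32)

# idx: (start, end) row ranges, consumed above as dataset[b_idx[0]:b_idx[1], :].
idx = [(start, start + BATCH_SIZE)
       for start in range(0, dataset.shape[0], BATCH_SIZE)]
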
Example #7
def KanadeAssociativeRBM(cache=False, train_further=False):
    print "Testing Associative RBM which tries to learn the ID map "
    # print "Testing Associative RBM which tries to learn the following mapping: {anger, saddness, disgust} -> {sadness}, {contempt, happy, surprise} -> {happy}"
    # project set-up
    data_manager = store.StorageManager('Kanade/OptMFSparse0.01RBMTest',
                                        log=True)
    # data_manager = store.StorageManager('Kanade/OptAssociativeRBMTest', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)

    # Load the Kanade dataset
    mapping = None  # id map
    # mapping = {'anger': 'sadness', 'contempt': 'happy', 'disgust': 'sadness', 'fear': 'sadness', 'happy': 'happy',
    #            'sadness': 'sadness', 'surprise': 'happy'}
    train, valid, test = loader.load_kanade(pre={'scale': True},
                                            set_name=dataset_name)
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_mapped, train_y_mapped = loader.sample_image(train_y,
                                                         mapping=mapping,
                                                         pre={'scale': True},
                                                         set_name=dataset_name)
    test_x_mapped, test_y_mapped = loader.sample_image(test_y,
                                                       mapping=mapping,
                                                       pre={'scale': True},
                                                       set_name=dataset_name)

    # Concatenate images
    concat1 = T.concatenate([train_x, train_x_mapped], axis=1)
    # concat2 = T.concatenate([train_x_mapped, train_x], axis=1)
    # concat = T.concatenate([concat1, concat2], axis=0)
    # train_tX = theano.function([], concat)()
    train_tX = theano.function([], concat1)()
    train_X = theano.shared(train_tX)

    # Train a classifier used to score reconstructions of the associated image layer
    # mapped_data = loader.load_kanade(#emotions=['sadness', 'happy'],
    #                                  pre={'scale': True},
    #                                  set_name=dataset_name)  # Target Image
    # clf_orig = SimpleClassifier('logistic', mapped_data[0][0], mapped_data[0][1])
    clf_orig = SimpleClassifier('logistic', train_x, train_y)

    # Initialise RBM
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.0001,
                               sparsity_constraint=True,
                               sparsity_target=0.01,
                               sparsity_cost=100,
                               sparsity_decay=0.9,
                               batch_size=10,
                               epochs=10)

    n_visible = shape * shape * 2
    n_hidden = 500

    config = rbm_config.RBMConfig()
    config.v_n = n_visible
    config.h_n = n_hidden
    config.v_unit = rbm_units.GaussianVisibleUnit
    # config.h_unit = rbm_units.ReLUnit
    config.progress_logger = rbm_logger.ProgressLogger(img_shape=(shape * 2,
                                                                  shape))
    config.train_params = tr
    rbm = RBM(config)
    print "... initialised RBM"

    # Load a previously persisted RBM if available; otherwise initialise the hidden biases and mean activity
    loaded = data_manager.retrieve(str(rbm))
    if loaded:
        rbm = loaded
    else:
        rbm.set_initial_hidden_bias()
        rbm.set_hidden_mean_activity(train_X)

    # Train RBM - learn joint distribution
    # rbm.pretrain_lr(train_x, train_x01)
    for i in xrange(0, 10):
        if not cache or train_further:
            rbm.train(train_X)

        data_manager.persist(rbm)

        print "... reconstruction of associated images"
        # Get reconstruction with train data to get 'mapped' images to train classifiers on
        reconstruction = rbm.reconstruct(train_X,
                                         1,
                                         plot_n=100,
                                         plot_every=1,
                                         img_name='recon_train')
        reconstruct_assoc_part = reconstruction[:, (shape**2):]

        # Get associated images of test data
        nsamples = np.random.normal(0, 1,
                                    test_x.get_value(True).shape).astype(
                                        np.float32)
        initial_y = theano.shared(nsamples, name='initial_y')
        utils.save_images(nsamples[0:100], 'initialisation.png', (10, 10),
                          (25, 25))

        test_x_associated = rbm.reconstruct_association_opt(
            test_x,
            initial_y,
            5,
            0.,
            plot_n=100,
            plot_every=1,
            img_name='recon_test_gibbs')

        mf_recon = rbm.mean_field_inference_opt(test_x,
                                                y=initial_y,
                                                sample=False,
                                                k=10,
                                                img_name='recon_test_mf_raw')

        # Concatenate images
        test_MFX = theano.function([], T.concatenate([test_x, mf_recon],
                                                     axis=1))()
        test_MF = theano.shared(test_MFX)
        reconstruction = rbm.reconstruct(test_MF,
                                         1,
                                         plot_n=100,
                                         plot_every=1,
                                         img_name='recon_test_mf_recon')
        mf_recon = reconstruction[:, (shape**2):]

        print "... reconstructed"

        # Classify the reconstructions

        # 1. Score with the classifier trained on the original images
        score_orig = clf_orig.get_score(test_x_associated,
                                        test_y_mapped.eval())
        score_orig_mf = clf_orig.get_score(mf_recon,
                                           test_y_mapped.eval())

        # 2. Train classifier on reconstructed images
        clf_recon = SimpleClassifier('logistic', reconstruct_assoc_part,
                                     train_y_mapped.eval())
        score_retrain = clf_recon.get_score(test_x_associated,
                                            test_y_mapped.eval())
        score_retrain_mf = clf_recon.get_score(mf_recon, test_y_mapped.eval())

        out_msg = '{} (orig, retrain):{},{}'.format(rbm, score_orig,
                                                    score_retrain)
        out_msg2 = '{} MF (orig, retrain):{},{}'.format(rbm, score_orig_mf,
                                                        score_retrain_mf)
        print out_msg
        print out_msg2
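
A minimal way to run the experiment above (a sketch; the flags simply mirror the function's defaults):

if __name__ == '__main__':
    # Train from scratch; pass cache=True to reuse a previously persisted RBM
    # and train_further=True to keep refining it inside the loop.
    KanadeAssociativeRBM(cache=False, train_further=False)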