Example #1
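# Imports assumed by the snippets below; rbm, rbm_config, rbm_units,
# rbm_logger, loader, store, utils, etc. are project-local modules from
# the source repository.
import numpy as np
import scipy.stats as ss
import matplotlib.pyplot as plt
import sklearn.preprocessing
import theano
import theano.tensor as T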
    def test_set_up(self):
        tr = rbm_config.TrainParam(learning_rate=0.05,
                                   momentum_type=rbm_config.CLASSICAL,
                                   momentum=0.5,
                                   weight_decay=0,
                                   sparsity_constraint=False,
                                   sparsity_target=0.1**9,
                                   sparsity_cost=10**8,
                                   sparsity_decay=0.9,
                                   epochs=5)

        config = rbm_config.RBMConfig()
        config.v_n = 3
        config.h_n = 2
        config.v_unit = rbm_units.GaussianVisibleUnit
        config.progress_logger = rbm_logger.ProgressLogger()
        config.train_params = tr
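        # Fixed seed keeps the random weight initialisation reproducible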
        np_rand = np.random.RandomState(123)

        # Weights
        W = np_rand.uniform(low=-1. / 10, high=1. / 10,
                            size=(3, 2)).astype(np.float32)
        vb = np.array([-0.1, 0, 0.1], dtype=np.float32)
        hb = np.array([0.01, -0.01], dtype=np.float32)
        Wt = theano.shared(W, name='W')
        vbt = theano.shared(vb, name='vbias')
        hbt = theano.shared(hb, name='hbias')
        g_rbm = rbm.RBM(config, W=Wt, h_bias=hbt, v_bias=vbt)
        self.assertTrue(g_rbm)
        self.assertTrue(isinstance(g_rbm.v_unit,
                                   rbm_units.GaussianVisibleUnit))
        self.assertTrue(isinstance(g_rbm.h_unit, rbm_units.RBMUnit))
        self.assertTrue(
            np.count_nonzero(g_rbm.W.get_value(borrow=True) - W) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.v_bias.get_value(borrow=True) - vb) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.h_bias.get_value(borrow=True) - hb) == 0)

        x = sklearn.preprocessing.scale(
            np.array([[200.0, 188., 7.], [150.0, 128., 0.], [250.0, 98., 3.]],
                     dtype=theano.config.floatX))
        v = theano.shared(x)
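        # One Gibbs step v -> h -> v' -> h' gives the negative-phase samples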
        _, _, h = g_rbm.sample_h_given_v(v)
        _, _, vs = g_rbm.sample_v_given_h(h)
        _, _, hs = g_rbm.sample_h_given_v(vs)
        dw = T.dot(v.T, h) - T.dot(vs.T, hs)
        dv = T.sum(v - vs, axis=0)
        dh = T.sum(h - hs, axis=0)
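        # The RBM's own gradient estimates, for comparison with the manual ones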
        gr = g_rbm.get_partial_derivatives(v, None)['gradients']
        gdw, gdv, gdh = gr[0], gr[1], gr[2]
        print gdw, gdv, gdh
        compute_derivative = theano.function([], [dw, dv, dh, gdw, gdv, gdh])
        for i in xrange(20):
            a, b, c, d, e, f = compute_derivative()
            # print a, b, c
            print d, e, f
Example #2
def train_kanade():
    print "Testing RBM"

    data_manager = store.StorageManager('Kanade/SimpleRBMTest')

    # Load Kanade face images
    datasets = loader.load_kanade(n=500,
                                  set_name='25_25',
                                  emotions=['happy', 'sadness'],
                                  pre={'scale2unit': True})
    train_x, train_y = datasets[0]

    sparsity_constraint = True
    # Initialise the RBM and training parameters
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.001,
                               sparsity_constraint=sparsity_constraint,
                               sparsity_target=0.01,
                               sparsity_cost=1,
                               sparsity_decay=0.9,
                               dropout=True,
                               epochs=100)

    n_visible = train_x.get_value().shape[1]
    n_hidden = 500

    config = rbm_config.RBMConfig(
        v_n=n_visible,
        v2_n=n_visible,
        h_n=n_hidden,
        v_unit=rbm_units.GaussianVisibleUnit,
        associative=False,
        cd_type=rbm_config.CLASSICAL,
        cd_steps=1,
        train_params=tr,
        progress_logger=rbm_logger.ProgressLogger(img_shape=(25, 25)))

    rbm = RBM(config)

    print "... initialised RBM"

    # Train RBM
    rbm.train(train_x)

    # Test RBM
    rbm.reconstruct(train_x, k=5, plot_n=10, plot_every=1)

    # Store Parameters
    data_manager.persist(rbm)
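Example #3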
def test_rbm():
    print "Testing RBM"

    # Load mnist hand digits
    datasets = mnist_loader.load_digits(n=[100, 0, 100], digits=[1])
    train_set_x, train_set_y = datasets[0]
    test_set_x, test_set_y = datasets[2]

    # Initialise the RBM and training parameters
    tr = rbm_config.TrainParam(learning_rate=0.01,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.5,
                               weight_decay=0.01,
                               sparsity_constraint=True,
                               sparsity_target=0.01,
                               sparsity_cost=0.01,
                               sparsity_decay=0.1)

    n_visible = train_set_x.get_value().shape[1]
    n_hidden = 2

    config = rbm_config.RBMConfig(v_n=n_visible,
                                  v2_n=n_visible,
                                  h_n=n_hidden,
                                  associative=False,
                                  cd_type=rbm_config.CLASSICAL,
                                  cd_steps=1,
                                  train_params=tr,
                                  progress_logger=rbm_logger.ProgressLogger())
    rbm = RBM(config)

    print "... initialised RBM"

    curr_dir = store.move_to(str(rbm))
    print "... moved to {}".format(curr_dir)

    # Train RBM
    rbm.train(train_set_x)

    # Test RBM
    rbm.reconstruct(test_set_x, k=1, plot_n=20)
Example #4
    def test_full(self):
        train, valid, test = mnist_loader.load_digits(n=[100, 0, 100],
                                                      pre={'scale': True})
        train_x, train_y = train
        valid_x, valid_y = valid
        test_x, test_y = test

        tr = rbm_config.TrainParam(learning_rate=0.0001,
                                   momentum_type=rbm.CLASSICAL,
                                   momentum=0.5,
                                   weight_decay=0.01,
                                   sparsity_constraint=False,
                                   sparsity_target=0.01,
                                   sparsity_cost=0.01,
                                   sparsity_decay=0.1,
                                   epochs=10)

        n_visible = train_x.get_value().shape[1]
        n_hidden = 100

        gaussian_rbm = rbm.GaussianRBM(
            n_visible,
            n_visible,
            n_hidden,
            associative=False,
            cd_type=rbm.CLASSICAL,
            cd_steps=1,
            visible_unit=rbm_units.GaussianVisibleUnit,
            hidden_unit=rbm_units.RBMUnit,
            train_parameters=tr,
            progress_logger=rbm_logger.ProgressLogger())

        curr_dir = store.move_to('simple_gaussian_rbm_test')
        print "... moved to {}".format(curr_dir)

        # Train RBM
        gaussian_rbm.train(train_x)
Example #5
    def test_digits(self):
        tr = rbm_config.TrainParam(learning_rate=0.05,
                                   momentum_type=rbm_config.CLASSICAL,
                                   momentum=0.5,
                                   weight_decay=0,
                                   sparsity_constraint=False,
                                   sparsity_target=0.1**9,
                                   sparsity_cost=10**8,
                                   sparsity_decay=0.9,
                                   epochs=5)

        config = rbm_config.RBMConfig()
        config.v_n = 784
        config.h_n = 100
        config.v_unit = rbm_units.GaussianVisibleUnit
        config.h_unit = rbm_units.ReLUnit
        config.progress_logger = rbm_logger.ProgressLogger()
        config.train_params = tr
        np_rand = np.random.RandomState(123)

        # Weights
        W = np_rand.uniform(low=-1. / 10, high=1. / 10,
                            size=(784, 100)).astype(np.float32)
        vb = np.zeros(784, dtype=np.float32)
        hb = np.zeros(100, dtype=np.float32)
        Wt = theano.shared(W, name='W')
        vbt = theano.shared(vb, name='vbias')
        hbt = theano.shared(hb, name='hbias')
        g_rbm = rbm.RBM(config, W=Wt, h_bias=hbt, v_bias=vbt)
        self.assertTrue(g_rbm)
        self.assertTrue(isinstance(g_rbm.v_unit,
                                   rbm_units.GaussianVisibleUnit))
        self.assertTrue(isinstance(g_rbm.h_unit, rbm_units.RBMUnit))
        self.assertTrue(
            np.count_nonzero(g_rbm.W.get_value(borrow=True) - W) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.v_bias.get_value(borrow=True) - vb) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.h_bias.get_value(borrow=True) - hb) == 0)

        train, vl, te = mnist_loader.load_digits(n=[5, 10, 10],
                                                 pre={'scale': True})
        v = train[0]

        # print 'inputs:'
        # table = ss.itemfreq(v.get_value(borrow=True))
        # x = [pt[0] for pt in table]
        # y = [pt[1] for pt in table]
        # plt.plot(x, y)
        # plt.show()

        # v = theano.shared(x)
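        # One Gibbs step and hand-computed CD-1 statistics, as in Example #1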
        _, _, h = g_rbm.sample_h_given_v(v)
        _, _, vs = g_rbm.sample_v_given_h(h)
        _, _, hs = g_rbm.sample_h_given_v(vs)
        dw = T.dot(v.T, h) - T.dot(vs.T, hs)
        dv = T.sum(v - vs, axis=0)
        dh = T.sum(h - hs, axis=0)
        gr = g_rbm.get_partial_derivatives(v, None)['gradients']
        gdw, gdv, gdh = gr[0], gr[1], gr[2]
        print gdw, gdv, gdh
        compute_derivative = theano.function([], [dw, dv, dh, gdw, gdv, gdh])
        for i in xrange(1):
            a, b, c, d, e, f = compute_derivative()
            # print a, b, c
            print 'unfold'
            print a[0], b[1:5], c[1:5]

            print 'rbm'
            print d[0], e[1:5], f[1:5]
Example #6
    def test_kanades_grrbm(self):
        nvis = 625
        nhid = 1000
        train_n = 10000
        batch_n = 20

        tr = rbm_config.TrainParam(learning_rate=0.05,
                                   momentum_type=rbm_config.CLASSICAL,
                                   momentum=0.5,
                                   weight_decay=0,
                                   sparsity_constraint=False,
                                   sparsity_target=0.1**9,
                                   sparsity_cost=10**8,
                                   sparsity_decay=0.9,
                                   epochs=5,
                                   batch_size=batch_n)

        config = rbm_config.RBMConfig()
        config.v_n = nvis
        config.h_n = nhid
        config.v_unit = rbm_units.GaussianVisibleUnit
        config.h_unit = rbm_units.ReLUnit
        config.progress_logger = rbm_logger.ProgressLogger()
        config.train_params = tr
        np_rand = np.random.RandomState(123)

        # Weights
        W = np_rand.normal(0, 0.01, size=(nvis, nhid)).astype(np.float32)
        vb = np.zeros(nvis, dtype=np.float32)
        hb = np.zeros(nhid, dtype=np.float32)
        Wt = theano.shared(W, name='W')
        vbt = theano.shared(vb, name='vbias')
        hbt = theano.shared(hb, name='hbias')
        g_rbm = rbm.RBM(config, W=Wt, h_bias=hbt, v_bias=vbt)
        self.assertTrue(g_rbm)
        self.assertTrue(isinstance(g_rbm.v_unit,
                                   rbm_units.GaussianVisibleUnit))
        self.assertTrue(isinstance(g_rbm.h_unit, rbm_units.RBMUnit))
        self.assertTrue(
            np.count_nonzero(g_rbm.W.get_value(borrow=True) - W) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.v_bias.get_value(borrow=True) - vb) == 0)
        self.assertTrue(
            np.count_nonzero(g_rbm.h_bias.get_value(borrow=True) - hb) == 0)

        train, vl, te = k_loader.load_kanade(n=batch_n, pre={'scale': True})
        v = train[0]

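        # Inspect the empirical distribution of input pixel values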
        vv = v.get_value(borrow=True).ravel()
        table = ss.itemfreq(vv)
        print table
        x = [pt[0] for pt in table]
        y = [pt[1] for pt in table]
        plt.plot(x, y)
        plt.show()

        # v = theano.shared(x)
        # Direct pre-activation (unused; h is recomputed by sample_h_given_v)
        # h = g_rbm.h_unit.activate(g_rbm.h_unit.scale(T.dot(v, W) + hb))

        _, _, h = g_rbm.sample_h_given_v(v)
        _, _, vs = g_rbm.sample_v_given_h(h)
        _, _, hs = g_rbm.sample_h_given_v(vs)

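        # Hand-computed CD-1 statistics, averaged over the batch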
        dw = (T.dot(v.T, h) - T.dot(vs.T, hs)) / batch_n
        dv = T.mean(v - vs, axis=0)
        dh = T.mean(h - hs, axis=0)
        gr = g_rbm.get_partial_derivatives(v, None)['gradients']
        gdw, gdv, gdh = gr[0], gr[1], gr[2]
        print gdw, gdv, gdh
        compute_derivative = theano.function([], [dw, dv, dh, gdw, gdv, gdh])
        for i in xrange(1):
            a, b, c, d, e, f = compute_derivative()
            # print a, b, c
            print 'unfold'
            print a[0][0:5], b[1:5], c[1:5]

            print 'rbm'
            print d[0][0:5], e[1:5], f[1:5]
Example #7
def KanadeAssociativeRBM(cache=False, train_further=False):
    print "Testing Associative RBM which tries to learn the ID map"
    # print "Testing Associative RBM which tries to learn the following mapping: {anger, saddness, disgust} -> {sadness}, {contempt, happy, surprise} -> {happy}"
    # project set-up
    data_manager = store.StorageManager('Kanade/OptMFSparse0.01RBMTest',
                                        log=True)
    # data_manager = store.StorageManager('Kanade/OptAssociativeRBMTest', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)

    # Load kanade database
    mapping = None  # id map
    # mapping = {'anger': 'sadness', 'contempt': 'happy', 'disgust': 'sadness', 'fear': 'sadness', 'happy': 'happy',
    #            'sadness': 'sadness', 'surprise': 'happy'}
    train, valid, test = loader.load_kanade(pre={'scale': True},
                                            set_name=dataset_name)
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_mapped, train_y_mapped = loader.sample_image(train_y,
                                                         mapping=mapping,
                                                         pre={'scale': True},
                                                         set_name=dataset_name)
    test_x_mapped, test_y_mapped = loader.sample_image(test_y,
                                                       mapping=mapping,
                                                       pre={'scale': True},
                                                       set_name=dataset_name)

    # Concatenate images
    concat1 = T.concatenate([train_x, train_x_mapped], axis=1)
    # concat2 = T.concatenate([train_x_mapped, train_x], axis=1)
    # concat = T.concatenate([concat1, concat2], axis=0)
    # train_tX = theano.function([], concat)()
    train_tX = theano.function([], concat1)()
    train_X = theano.shared(train_tX)

    # Train classifier to be used for classifying reconstruction associated image layer
    # mapped_data = loader.load_kanade(#emotions=['sadness', 'happy'],
    #                                  pre={'scale': True},
    #                                  set_name=dataset_name)  # Target Image
    # clf_orig = SimpleClassifier('logistic', mapped_data[0][0], mapped_data[0][1])
    clf_orig = SimpleClassifier('logistic', train_x, train_y)

    # Initialise RBM
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.0001,
                               sparsity_constraint=True,
                               sparsity_target=0.01,
                               sparsity_cost=100,
                               sparsity_decay=0.9,
                               batch_size=10,
                               epochs=10)

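    # The visible layer spans two concatenated images, hence shape * shape * 2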
    n_visible = shape * shape * 2
    n_hidden = 500

    config = rbm_config.RBMConfig()
    config.v_n = n_visible
    config.h_n = n_hidden
    config.v_unit = rbm_units.GaussianVisibleUnit
    # config.h_unit = rbm_units.ReLUnit
    config.progress_logger = rbm_logger.ProgressLogger(img_shape=(shape * 2,
                                                                  shape))
    config.train_params = tr
    rbm = RBM(config)
    print "... initialised RBM"

    # Load RBM (test)
    loaded = data_manager.retrieve(str(rbm))
    if loaded:
        rbm = loaded
    else:
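        # No cached model: initialise hidden bias and mean activity from data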
        rbm.set_initial_hidden_bias()
        rbm.set_hidden_mean_activity(train_X)

    # Train RBM - learn joint distribution
    # rbm.pretrain_lr(train_x, train_x01)
    for i in xrange(0, 10):
        if not cache or train_further:
            rbm.train(train_X)

        data_manager.persist(rbm)

        print "... reconstruction of associated images"
        # Get reconstruction with train data to get 'mapped' images to train classifiers on
        reconstruction = rbm.reconstruct(train_X,
                                         1,
                                         plot_n=100,
                                         plot_every=1,
                                         img_name='recon_train')
        reconstruct_assoc_part = reconstruction[:, (shape**2):]

        # Get associated images of test data
        nsamples = np.random.normal(0, 1,
                                    test_x.get_value(True).shape).astype(
                                        np.float32)
        initial_y = theano.shared(nsamples, name='initial_y')
        utils.save_images(nsamples[0:100], 'initialisation.png', (10, 10),
                          (25, 25))

        test_x_associated = rbm.reconstruct_association_opt(
            test_x,
            initial_y,
            5,
            0.,
            plot_n=100,
            plot_every=1,
            img_name='recon_test_gibbs')

        mf_recon = rbm.mean_field_inference_opt(test_x,
                                                y=initial_y,
                                                sample=False,
                                                k=10,
                                                img_name='recon_test_mf_raw')

        # Concatenate images
        test_MFX = theano.function([], T.concatenate([test_x, mf_recon],
                                                     axis=1))()
        test_MF = theano.shared(test_MFX)
        reconstruction = rbm.reconstruct(test_MF,
                                         1,
                                         plot_n=100,
                                         plot_every=1,
                                         img_name='recon_test_mf_recon')
        mf_recon = reconstruction[:, (shape**2):]

        print "... reconstructed"

        # Classify the reconstructions

        # 1. Train classifier on original images
        score_orig = clf_orig.get_score(test_x_associated,
                                        test_y_mapped.eval())
        score_orig_mf = clf_orig.get_score(mf_recon,
                                           test_y_mapped.eval())

        # 2. Train classifier on reconstructed images
        clf_recon = SimpleClassifier('logistic', reconstruct_assoc_part,
                                     train_y_mapped.eval())
        score_retrain = clf_recon.get_score(test_x_associated,
                                            test_y_mapped.eval())
        score_retrain_mf = clf_recon.get_score(mf_recon, test_y_mapped.eval())

        out_msg = '{} (orig, retrain):{},{}'.format(rbm, score_orig,
                                                    score_retrain)
        out_msg2 = '{} (orig_mf, retrain_mf):{},{}'.format(rbm, score_orig_mf,
                                                           score_retrain_mf)
        print out_msg
        print out_msg2
Example #8
def KanadeJointDBN(cache=False):
    print "Testing JointDBN which tries to learn id map association"

    # project set-up
    data_manager = store.StorageManager('Kanade/JointDBN', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Load kanade database
    mapping = None
    # mapping = {'anger': 'sadness',
    #            'contempt': 'happy',
    #            'disgust': 'sadness',
    #            'fear': 'sadness',
    #            'happy': 'happy',
    #            'sadness': 'sadness',
    #            'surprise': 'happy'}

    dataset = loader.load_kanade(  # n=3000,
        pre=preprocessing, set_name=dataset_name)

    mapped_dataset = loader.load_kanade(  # n=3000,
        # emotions=['sadness', 'happy'],
        pre=preprocessing,
        set_name=dataset_name)  # Target Image
    train, valid, test = dataset
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_ass, train_y_ass = loader.sample_image(train_y,
                                                   mapping=mapping,
                                                   pre=preprocessing,
                                                   set_name=dataset_name)
    test_x_ass, test_y_ass = loader.sample_image(test_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    # Initialise RBM parameters
    base_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                    momentum_type=rbm_config.NESTEROV,
                                    momentum=0.9,
                                    weight_decay=0.0001,
                                    sparsity_constraint=False,
                                    sparsity_target=0.00001,
                                    sparsity_decay=0.9,
                                    sparsity_cost=10000,
                                    epochs=100,
                                    batch_size=10)

    rest_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                    momentum_type=rbm_config.CLASSICAL,
                                    momentum=0.5,
                                    weight_decay=0.01,
                                    epochs=100,
                                    batch_size=10)

    # Topology: concatenated input (2 * shape^2), then two hidden layers
    topology = [2 * (shape**2), 100, 100]
    # batch_size = 10
    first_progress_logger = rbm_logger.ProgressLogger(img_shape=(shape * 2,
                                                                 shape))
    rest_progress_logger = rbm_logger.ProgressLogger()

    first_rbm_config = rbm_config.RBMConfig(
        train_params=base_tr, progress_logger=first_progress_logger)
    first_rbm_config.v_unit = rbm_units.GaussianVisibleUnit
    rest_rbm_config = rbm_config.RBMConfig(
        train_params=rest_tr, progress_logger=rest_progress_logger)
    rbm_configs = [first_rbm_config, rest_rbm_config, rest_rbm_config]

    config = DBN.DBNConfig(topology=topology,
                           training_parameters=base_tr,
                           rbm_configs=rbm_configs,
                           data_manager=data_manager)

    # construct the Deep Belief Network
    dbn = DBN.DBN(config)

    # Train DBN on concatenated images
    train_tX = theano.function([], T.concatenate([train_x, train_x_ass],
                                                 axis=1))()
    train_X = theano.shared(train_tX)
    test_tX = theano.function([], T.concatenate([test_x, test_x_ass],
                                                axis=1))()
    test_X = theano.shared(test_tX)
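    # test_X2 zeroes the associated half, so recall must come from the model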
    test_tX2 = theano.function([],
                               T.concatenate(
                                   [test_x, T.zeros_like(test_x)], axis=1))()
    test_X2 = theano.shared(test_tX2)

    origs = []
    recons = []
    recons2 = []

    # Train DBN
    dbn.pretrain(train_X,
                 cache=[True, True, False],
                 train_further=[True, True, True])

    recon = dbn.reconstruct(train_X,
                            k=1,
                            plot_n=20,
                            img_name='stackedRBM_train_recon_{}_{}'.format(
                                topology, 0))
    train_x_ass_recon = recon[:, shape**2:]

    recon = dbn.reconstruct(test_X,
                            k=1,
                            plot_n=20,
                            img_name='stackedRBM_test_recon_{}_{}'.format(
                                topology, 0))
    test_x_ass_recon = recon[:, shape**2:]

    recon = dbn.reconstruct(test_X2,
                            k=2,
                            plot_n=20,
                            img_name='stackedRBM_test_zero_recon_{}_{}'.format(
                                topology, 0))
    test_x_ass_recon2 = recon[:, shape**2:]

    clf_recon = SimpleClassifier('logistic', train_x, train_y)
    score_orig = clf_recon.get_score(test_x_ass_recon, test_y_ass.eval())

    clf_recon.retrain(train_x_ass_recon, train_y_ass.eval())
    score_recon = clf_recon.get_score(test_x_ass_recon, test_y_ass.eval())
    score_recon2 = clf_recon.get_score(test_x_ass_recon2, test_y_ass.eval())

    print 'classification rate: {}, {}, {}'.format(score_orig, score_recon,
                                                   score_recon2)
    origs.append(score_orig)
    recons.append(score_recon)
    recons2.append(score_recon2)
Example #9
def KanadeAssociativeDBN(cache=False):
    print "Testing Associative DBN which tries to learn the ID mapping"
    # "{anger, saddness, disgust} -> {sadness}, {contempt, happy, surprise} -> {happy}"
    # project set-up
    data_manager = store.StorageManager('Kanade/AssociativeDBNTest', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Load kanade database
    mapping = None
    # mapping = {'anger': 'sadness',
    #            'contempt': 'happy',
    #            'disgust': 'sadness',
    #            'fear': 'sadness',
    #            'happy': 'happy',
    #            'sadness': 'sadness',
    #            'surprise': 'happy'}

    dataset = loader.load_kanade(n=100,
                                 pre=preprocessing,
                                 set_name=dataset_name)

    mapped_dataset = loader.load_kanade(
        n=100,
        # emotions=['sadness', 'happy'],
        pre=preprocessing,
        set_name=dataset_name)  # Target Image
    train, valid, test = dataset
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_ass, train_y_ass = loader.sample_image(train_y,
                                                   mapping=mapping,
                                                   pre=preprocessing,
                                                   set_name=dataset_name)
    test_x_ass, test_y_ass = loader.sample_image(test_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    # initialise AssociativeDBN
    config = associative_dbn.DefaultADBNConfig()

    # Gaussian Input Layer
    bottom_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                      momentum_type=rbm_config.NESTEROV,
                                      momentum=0.9,
                                      weight_decay=0.0001,
                                      epochs=20,
                                      batch_size=10)
    h_n = 150
    bottom_logger = rbm_logger.ProgressLogger(img_shape=(shape, shape))
    bottom_rbm = rbm_config.RBMConfig(v_unit=rbm_units.GaussianVisibleUnit,
                                      v_n=shape**2,
                                      h_n=h_n,
                                      progress_logger=bottom_logger,
                                      train_params=bottom_tr)

    config.left_dbn.rbm_configs[0] = bottom_rbm
    config.right_dbn.rbm_configs[0] = bottom_rbm
    config.left_dbn.topology = [shape**2, h_n]
    config.right_dbn.topology = [shape**2, h_n]
    config.top_rbm.train_params.epochs = 20
    config.top_rbm.train_params.batch_size = 10
    config.n_association = 1000
    config.reuse_dbn = True
    adbn = associative_dbn.AssociativeDBN(config=config,
                                          data_manager=data_manager)

    # Plot sample
    loader.save_faces(
        train_x.get_value(borrow=True)[1:50],
        tile=(10, 10),
        img_name='n_orig.png',
    )
    loader.save_faces(train_x_ass.get_value(borrow=True)[1:50],
                      tile=(10, 10),
                      img_name='n_ass.png')

    # Train classifier to be used for classifying reconstruction associated image layer
    clf_orig = SimpleClassifier('knn', mapped_dataset[0][0],
                                mapped_dataset[0][1])

    # Test DBN Performance
    for i in xrange(0, 5):
        # Train DBN - learn joint distribution
        cache_left = [True]
        cache_right = [True]
        cache_top = False
        cache = [cache_left, cache_right, cache_top]
        adbn.train(train_x, train_x_ass, cache=cache)
        print "... trained associative DBN"

        # Reconstruct images
        test_x_recon = adbn.recall(test_x, associate_steps=500, recall_steps=0)
        print "... reconstructed images"

        # Classify the reconstructions

        # 1. Train classifier on original images
        score_orig = clf_orig.get_score(test_x_recon, test_y_ass.eval())

        # 2. Train classifier on reconstructed images - reconstruction obtained by right dbn
        right_dbn = adbn.dbn_right
        mapped_train_recon = right_dbn.reconstruct(
            mapped_dataset[0][0],
            k=1,
            plot_n=100,
            plot_every=1,
            img_name='right_dbn_reconstruction')
        clf_recon = SimpleClassifier('knn', mapped_train_recon,
                                     mapped_dataset[0][1].eval())
        score_retrain = clf_recon.get_score(test_x_recon, test_y_ass.eval())

        out_msg = '{} (orig, retrain):{},{}'.format(adbn, score_orig,
                                                    score_retrain)
        print out_msg