def noise_classification(project_name='NoiseClassification',
                         emotions={'happy': 0.9, 'sadness': 0.1}):
    # Project set up
    manager = StorageManager(project_name, log=True)

    # Initialise dataset
    dataset = k_loader.load_kanade(set_name='25_25',
                                   pre={'scale': True},
                                   emotions=emotions,
                                   n=1000)
    tr, vl, te = dataset
    tr_x, tr_y = tr
    te_x, te_y = te

    tr, vl, te = k_loader.load_kanade(shared=True,
                                      set_name='25_25',
                                      pre={'scale': True},
                                      emotions=['happy', 'sadness'],
                                      n=10000)
    clf_tr_x, clf_tr_y = tr
    clf = SimpleClassifier('knn', clf_tr_x, clf_tr_y)

    emotions = ['sadness']
    noisy_data = []
    noisy_label = []
    noisy_levels = ['', 'noise0.1_', 'noise0.3_', 'noise0.5_',
                    'noise0.7_', 'noise0.9_']
    for noise_lvl in noisy_levels:
        t, vl, te = k_loader.load_kanade(set_name='{}25_25'.format(noise_lvl),
                                         pre={'scale': True},
                                         emotions=emotions,
                                         n=1000)
        n_tr_x, n_tr_y = t
        noisy_data.append(n_tr_x)
        noisy_label.append(n_tr_y.eval())

    assess_rbm(clf, noisy_data, noisy_label, noisy_levels, tr_x, '2')
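# Usage sketch (hedged): an assumed entry point for the experiment above.
# noise_classification() and its collaborators (StorageManager, k_loader,
# SimpleClassifier, assess_rbm) come from this project; only the calls below
# are new. They bias the training mix towards one emotion, then the other.
if __name__ == '__main__':
    noise_classification('NoiseClassification', {'happy': 0.9, 'sadness': 0.1})
    noise_classification('NoiseClassificationSad', {'happy': 0.1, 'sadness': 0.9})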
def test_filter(self):
    dataset = loader.load_kanade(shared=False, set_name='50_50',
                                 emotions=['anger'])
    self.assertTrue(np.unique(dataset[1]) == 1)

    dataset = loader.load_kanade(shared=False, set_name='50_50',
                                 emotions=['anger', 'contempt'])
    labels = np.unique(dataset[1])
    self.assertTrue(len(labels) == 2 and 1 in labels and 2 in labels)
def test_kanade_scaling(self):
    datasets = k_loader.load_kanade(shared=False, n=10)
    x, y = datasets[0]
    xsc = sklearn.preprocessing.scale(x.astype(float))
    print x
    print xsc

    datasets = k_loader.load_kanade(shared=False, n=10,
                                    pre={'scale2unit': True, 'scale': True})
    x, y = datasets[0]
    xsc = sklearn.preprocessing.scale(x.astype(float))
    print xsc
def test_shared(self):
    dataset = loader.load_kanade(n=10)
    train_x, train_y = dataset
    # print type(train_x)
    # print type(train_y)
    self.assertTrue(
        isinstance(train_x, theano.tensor.sharedvar.TensorSharedVariable))
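# Minimal sketch: with shared=True the loader returns Theano shared variables,
# so plain NumPy consumers (e.g. scikit-learn) need get_value()/eval() first.
# get_value() and eval() are standard Theano API; the unpacking mirrors the
# test above.
def as_numpy(dataset):
    train_x, train_y = dataset
    return train_x.get_value(borrow=True), train_y.eval()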
def test_load(self):
    dataset = loader.load_kanade(shared=False, set_name='50_50')
    x = dataset[0]
    y = dataset[1]
    print x.shape
    self.assertTrue(len(x) == len(y))
    self.assertTrue(len(x[0]) == 50 * 50)
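# Optional visual check (sketch): save_faces is used later in this file with
# the signature save_faces(rows, tile=..., img_name=...); assuming it accepts
# raw NumPy rows, a 10x10 tile previews the first hundred flattened 50x50
# faces loaded above.
def preview_faces(x):
    loader.save_faces(x[0:100], tile=(10, 10), img_name='preview_50_50.png')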
def test_scale(self):
    dataset = loader.load_kanade(shared=False, n=2, pre={'scale2unit': True})
    self.assertTrue(len(dataset[0]) == 2 and len(dataset[1]) == 2)
    print dataset[0]
    print itemfreq(dataset[0])
def test_rbm():
    print "Testing RBM"
    data_manager = store.StorageManager('TestRBM')

    # Load Cohn-Kanade dataset
    datasets = loader.load_kanade(pre={'scale': True}, n=100,
                                  set_name='sharp_equi25_25')
    train_set_x, train_set_y = datasets[0]
    test_set_x, test_set_y = datasets[2]

    # Initialise the RBM and training parameters
    tr = TrainParam(learning_rate=0.0001,
                    momentum_type=NESTEROV,
                    momentum=0.5,
                    weight_decay=0.0001,
                    sparsity_constraint=True,
                    sparsity_target=0.01,
                    sparsity_cost=0.1,
                    sparsity_decay=0.9,
                    dropout=True,
                    dropout_rate=0.5,
                    batch_size=10,
                    epochs=10)

    n_visible = train_set_x.get_value(borrow=True).shape[1]
    n_hidden = 10

    config = RBMConfig()
    config.v_n = n_visible
    config.h_n = n_hidden
    config.v_unit = rbm_units.GaussianVisibleUnit
    # config.h_unit = rbm_units.ReLUnit
    config.progress_logger = ProgressLogger(img_shape=(25, 25))
    config.train_params = tr
    rbm = RBM(config)

    print "... initialised RBM"

    load = store.retrieve_object(str(rbm))
    if load:
        rbm = load

    for i in xrange(0, 1):
        # Train RBM
        rbm.train(train_set_x)
        data_manager.persist(rbm)

        # Test RBM reconstruction via linear classifier
        clf = SimpleClassifier(classifier='logistic',
                               train_x=train_set_x,
                               train_y=train_set_y)
        recon_te = rbm.reconstruct(test_set_x, k=1, plot_n=100, plot_every=1,
                                   img_name='recon_te_{}.png'.format(i))
        print 'Original Score: {}'.format(clf.get_score(test_set_x, test_set_y))
        print 'Recon Score: {}'.format(clf.get_score(recon_te, test_set_y.eval()))
def train_kanade():
    print "Testing RBM"
    data_manager = store.StorageManager('Kanade/SimpleRBMTest')

    # Load Kanade face dataset
    datasets = loader.load_kanade(n=500, set_name='25_25',
                                  emotions=['happy', 'sadness'],
                                  pre={'scale2unit': True})
    train_x, train_y = datasets[0]

    sparsity_constraint = True

    # Initialise the RBM and training parameters
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.001,
                               sparsity_constraint=sparsity_constraint,
                               sparsity_target=0.01,
                               sparsity_cost=1,
                               sparsity_decay=0.9,
                               dropout=True,
                               epochs=100)

    n_visible = train_x.get_value().shape[1]
    n_hidden = 500

    config = rbm_config.RBMConfig(
        v_n=n_visible,
        v2_n=n_visible,
        h_n=n_hidden,
        v_unit=rbm_units.GaussianVisibleUnit,
        associative=False,
        cd_type=rbm_config.CLASSICAL,
        cd_steps=1,
        train_params=tr,
        progress_logger=rbm_logger.ProgressLogger(img_shape=(25, 25)))
    rbm = RBM(config)
    print "... initialised RBM"

    # Train RBM
    rbm.train(train_x)

    # Test RBM
    rbm.reconstruct(train_x, k=5, plot_n=10, plot_every=1)

    # Store parameters
    data_manager.persist(rbm)
def test_classifier():
    # Classify Kanade
    train, valid, test = loader.load_kanade(shared=False,
                                            set_name='sharp_equi25_25',
                                            pre={'scale2unit': True})
    train_x, train_y = train
    valid_x, valid_y = valid
    test_x, test_y = test

    clf = SimpleClassifier(classifier='knn', train_x=train_x, train_y=train_y)
    print 'KNN: {}'.format(clf.get_score(test_x, test_y))
    clf = SimpleClassifier(classifier='logistic', train_x=train_x, train_y=train_y)
    print 'Logistic Regression: {}'.format(clf.get_score(test_x, test_y))
    clf = SimpleClassifier(classifier='svm', train_x=train_x, train_y=train_y)
    print 'LinearSVC: {}'.format(clf.get_score(test_x, test_y))

    train, valid, test = loader.load_kanade(shared=True, pre={'scale2unit': True})
    train_x, train_y = train
    valid_x, valid_y = valid
    test_x, test_y = test

    clf = SimpleClassifier(classifier='knn', train_x=train_x, train_y=train_y)
    print 'KNN: {}'.format(clf.get_score(test_x, test_y))
    clf = SimpleClassifier(classifier='logistic', train_x=train_x, train_y=train_y)
    print 'Logistic Regression: {}'.format(clf.get_score(test_x, test_y))
    clf = SimpleClassifier(classifier='svm', train_x=train_x, train_y=train_y)
    print 'LinearSVC: {}'.format(clf.get_score(test_x, test_y))
def test_sk_methods():
    # Classify Kanade
    train, valid, test = loader.load_kanade(shared=False,
                                            set_name='sharp_equi25_25',
                                            pre={'scale2unit': True},
                                            n=10000)
    train_x, train_y = train
    valid_x, valid_y = valid
    test_x, test_y = test

    # K-nearest neighbours
    ks = np.arange(1, 6, 4)
    # ks = np.arange(3, 36, 4)
    for k in ks:
        clf = KNeighborsClassifier(k, weights='uniform')
        clf.fit(train_x, train_y)
        guess_y = clf.predict(test_x)
        print '{}-NN: {}'.format(k, np.sum(guess_y == test_y) * 1.0 / len(guess_y))

    # Logistic Regression
    clf = LogisticRegression()
    clf.fit(train_x, train_y)
    guess_y = clf.predict(test_x)
    print 'Logistic Regression: {}'.format(np.sum(guess_y == test_y) * 1.0 / len(guess_y))

    print 'SVM'
    # SVC
    clf = svm.SVC()
    clf.fit(train_x, train_y)
    guess_y = clf.predict(test_x)
    print 'SVC: {}'.format(np.sum(guess_y == test_y) * 1.0 / len(guess_y))

    # NuSVC
    # clf = svm.NuSVC()
    # clf.fit(train_x, train_y)
    # guess_y = clf.predict(test_x)
    # print 'NuSVC: {}'.format(np.sum(guess_y == test_y) * 1.0 / len(guess_y))

    # LinearSVC
    clf = svm.LinearSVC()
    clf.fit(train_x, train_y)
    guess_y = clf.predict(test_x)
    print 'LinearSVC: {}'.format(np.sum(guess_y == test_y) * 1.0 / len(guess_y))
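# Aside: each manual score above, np.sum(guess_y == test_y) * 1.0 / len(guess_y),
# is the mean accuracy that scikit-learn already exposes via estimator.score().
# A minimal equivalent sweep using that API (fit/score are standard sklearn):
def sk_accuracy_sweep(train_x, train_y, test_x, test_y, ks=(1, 5)):
    from sklearn.neighbors import KNeighborsClassifier
    for k in ks:
        clf = KNeighborsClassifier(k, weights='uniform')
        clf.fit(train_x, train_y)
        print '{}-NN: {}'.format(k, clf.score(test_x, test_y))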
def experimentChild(project_name, mapping, shape, model):
    # Project set up
    data_manager = StorageManager(project_name, log=True)
    f = open(project_name + '.txt', mode='a')
    f.write(project_name)
    f.write('\tMODEL=')
    f.write(model)
    f.write('\tSHAPE=')
    f.write('%d' % shape)
    f.write('\n')

    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Get dataset
    happy_set = kanade_loader.load_kanade(set_name=dataset_name,
                                          emotions=mapping.keys(),
                                          pre=preprocessing,
                                          n=100)
    h_tr, h_vl, h_te = happy_set
    h_tr_x, h_tr_y = h_tr
    h_vl_x, h_vl_y = h_vl
    h_te_x, h_te_y = h_te

    # Sample Parent emotion
    p_tr_x, p_tr_y = kanade_loader.sample_image2(h_tr_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    concat1 = theano.function([], T.concatenate([h_tr_x, p_tr_x], axis=1))()
    tr_x = theano.shared(concat1, name='tr_x')

    c1 = T.concatenate([h_tr_x, p_tr_x], axis=1)
    c2 = T.concatenate([p_tr_x, h_tr_x], axis=1)
    c3 = theano.function([], T.concatenate([c1, c2], axis=0))()
    tr_x_mixed = theano.shared(c3, name='tr_x_mixed')

    # TODO make interface for brain model
    # initial_y = np.zeros(h_te_x.get_value(True).shape)
    initial_y = np.random.normal(0, 1, h_te_x.get_value(True).shape)
    initial_y = theano.shared(initial_y, name='initial_y')
    te_x = theano.shared(theano.function([], T.concatenate(
        [h_te_x, initial_y], axis=1))().astype(t_float_x))

    if model == 'rbm':
        brain_c = get_brain_model_RBM(shape)
        load = data_manager.retrieve(str(brain_c))
        if load:
            brain_c = load
        else:
            brain_c.set_initial_hidden_bias()
            brain_c.set_hidden_mean_activity(tr_x)
            brain_c.train(tr_x)
            data_manager.persist(brain_c)
            # brain_c.train(tr_x_mixed)

        recon = brain_c.reconstruct_association_opt(
            h_te_x, initial_y, k=10, plot_n=100,
            img_name='rbm_child_recon_{}'.format(shape))
        recon_pair = brain_c.reconstruct(
            tr_x, k=1, plot_n=100,
            img_name='rbm_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]

    elif model == 'dbn':
        brain_c = get_brain_model_DBN(shape, data_manager)
        brain_c.pretrain(tr_x, cache=[True, True, True],
                         train_further=[True, True, True])
        recon_pair = brain_c.reconstruct(
            tr_x, k=1, plot_n=100,
            img_name='dbn_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]
        recon_pair = brain_c.reconstruct(
            te_x, k=1, plot_n=100,
            img_name='dbn_child_recon_{}'.format(shape))
        recon = recon_pair[:, (shape ** 2):]

    elif model == 'adbn':
        brain_c = get_brain_model_AssociativeDBN(shape, data_manager)
        brain_c.train(h_tr_x, p_tr_x,
                      cache=[[True, True, True], [True, True, True], True],
                      train_further=[[True, True, True], [True, True, True], True])

        # Reconstruction
        recon_p_tr_x = brain_c.dbn_right.reconstruct(
            p_tr_x, k=10, plot_every=1, plot_n=100,
            img_name='adbn_right_recon_{}'.format(shape))
        recon = brain_c.recall(h_te_x, associate_steps=5, recall_steps=0,
                               img_name='adbn_child_recon_{}'.format(shape))

    # Train classifier on reconstruction
    clf = SimpleClassifier('logistic', recon_p_tr_x, p_tr_y.eval())

    # Output number of classes
    res = clf.classify(recon).T
    r = np.histogram(res, bins=np.arange(1, 9))
    labels = map(lambda x: kanade_loader.emotion_rev_dict[int(x)], r[1][:-1])
    # labels = map(lambda x: kanade_loader.emotion_rev_dict[int(x)], r[0])
    proportion = r[0] * 1. / sum(r[0])

    txt = 'Learnt Child Configuration:'
    print txt
    f.write(txt)
    f.write('\n')
    for i, l in enumerate(labels):
        fill_space = max(map(lambda x: len(x), labels)) - len(l)
        txt = '{}{}:\t %.3f'.format(l, ' ' * fill_space) % proportion[i]
        print txt
        f.write(txt)
        f.write('\n')
    f.write('\n')
    f.close()
    data_manager.finish()
def KanadeJointDBN(cache=False):
    print "Testing JointDBN which tries to learn id map association"

    # Project set-up
    data_manager = store.StorageManager('Kanade/JointDBN', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Load kanade database
    mapping = None
    # mapping = {'anger': 'sadness',
    #            'contempt': 'happy',
    #            'disgust': 'sadness',
    #            'fear': 'sadness',
    #            'happy': 'happy',
    #            'sadness': 'sadness',
    #            'surprise': 'happy'}

    dataset = loader.load_kanade(  # n=3000,
        pre=preprocessing,
        set_name=dataset_name)
    mapped_dataset = loader.load_kanade(  # n=3000,
        # emotions=['sadness', 'happy'],
        pre=preprocessing,
        set_name=dataset_name)  # Target Image

    train, valid, test = dataset
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_ass, train_y_ass = loader.sample_image(train_y,
                                                   mapping=mapping,
                                                   pre=preprocessing,
                                                   set_name=dataset_name)
    test_x_ass, test_y_ass = loader.sample_image(test_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    # Initialise RBM parameters
    base_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                    momentum_type=rbm_config.NESTEROV,
                                    momentum=0.9,
                                    weight_decay=0.0001,
                                    sparsity_constraint=False,
                                    sparsity_target=0.00001,
                                    sparsity_decay=0.9,
                                    sparsity_cost=10000,
                                    epochs=100,
                                    batch_size=10)

    rest_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                    momentum_type=rbm_config.CLASSICAL,
                                    momentum=0.5,
                                    weight_decay=0.01,
                                    epochs=100,
                                    batch_size=10)

    # Topology: layer 1, layer 2, layer 3
    topology = [2 * (shape ** 2), 100, 100]
    # batch_size = 10
    first_progress_logger = rbm_logger.ProgressLogger(img_shape=(shape * 2, shape))
    rest_progress_logger = rbm_logger.ProgressLogger()

    first_rbm_config = rbm_config.RBMConfig(train_params=base_tr,
                                            progress_logger=first_progress_logger)
    first_rbm_config.v_unit = rbm_units.GaussianVisibleUnit
    rest_rbm_config = rbm_config.RBMConfig(train_params=rest_tr,
                                           progress_logger=rest_progress_logger)
    rbm_configs = [first_rbm_config, rest_rbm_config, rest_rbm_config]

    config = DBN.DBNConfig(topology=topology,
                           training_parameters=base_tr,
                           rbm_configs=rbm_configs,
                           data_manager=data_manager)

    # Construct the Deep Belief Network
    dbn = DBN.DBN(config)

    # Train DBN on concatenated images
    train_tX = theano.function([], T.concatenate([train_x, train_x_ass], axis=1))()
    train_X = theano.shared(train_tX)
    test_tX = theano.function([], T.concatenate([test_x, test_x_ass], axis=1))()
    test_X = theano.shared(test_tX)
    test_tX2 = theano.function([], T.concatenate([test_x, T.zeros_like(test_x)], axis=1))()
    test_X2 = theano.shared(test_tX2)

    origs = []
    recons = []
    recons2 = []

    # Train DBN
    dbn.pretrain(train_X, cache=[True, True, False],
                 train_further=[True, True, True])

    recon = dbn.reconstruct(train_X, k=1, plot_n=20,
                            img_name='stackedRBM_train_recon_{}_{}'.format(topology, 0))
    train_x_ass_recon = recon[:, shape ** 2:]

    recon = dbn.reconstruct(test_X, k=1, plot_n=20,
                            img_name='stackedRBM_test_recon_{}_{}'.format(topology, 0))
    test_x_ass_recon = recon[:, shape ** 2:]

    recon = dbn.reconstruct(test_X2, k=2, plot_n=20,
                            img_name='stackedRBM_test_zero_recon_{}_{}'.format(topology, 0))
    test_x_ass_recon2 = recon[:, shape ** 2:]

    clf_recon = SimpleClassifier('logistic', train_x, train_y)
    score_orig = clf_recon.get_score(test_x_ass_recon, test_y_ass.eval())
    clf_recon.retrain(train_x_ass_recon, train_y_ass.eval())
    score_recon = clf_recon.get_score(test_x_ass_recon, test_y_ass.eval())
    score_recon2 = clf_recon.get_score(test_x_ass_recon2, test_y_ass.eval())

    print 'classification rate: {}, {}, {}'.format(score_orig, score_recon, score_recon2)
    origs.append(score_orig)
    recons.append(score_recon)
    recons2.append(score_recon2)
def experiment_adbn(project_name, mapping, shape):
    # Project set up
    data_manager = StorageManager(project_name, log=True)
    f = open(project_name + '.txt', mode='a')
    f.write(project_name)
    f.write('%d' % shape)
    f.write('\n')

    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Get dataset
    dataset = kanade_loader.load_kanade(set_name=dataset_name,
                                        emotions=mapping.keys(),
                                        pre=preprocessing,
                                        # n=100
                                        )
    tr, vl, te = dataset
    tr_x, tr_y = tr
    te_x, te_y = te

    # Sample Parent emotion
    p_tr_x, p_tr_y = kanade_loader.sample_image2(tr_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    configs = []
    for lr1 in [0.01, 0.001, 0.0001]:
        for dropout in [True]:
            for n_association in [50, 100, 250]:
                config = get_brain_model_AssociativeDBN(shape,
                                                        n_association=n_association,
                                                        h_n=250,
                                                        h_n2=100,
                                                        dropout=dropout)
                config.left_dbn.rbm_configs[1].train_params.learning_rate = lr1
                config.right_dbn.rbm_configs[1].train_params.learning_rate = lr1
                config.top_rbm.train_params.learning_rate = lr1
                # config.n_association = n_association
                # config.left_dbn.topology = [shape ** 2, h_n, h_n]
                # config.left_dbn.rbm_configs[0].h_n = h_n
                # config.left_dbn.rbm_configs[1].v_n = h_n
                # config.left_dbn.rbm_configs[1].h_n = h_n
                configs.append(config)

    for epoch in xrange(10):
        for i, config in enumerate(configs):
            brain_c = associative_dbn.AssociativeDBN(
                config,
                data_manager=StorageManager('{}/{}'.format(project_name, i),
                                            log=False))
            brain_c.train(tr_x, p_tr_x,
                          cache=[[True, True, True], [True, True, True], True],
                          train_further=[[True, True, True], [True, True, True], True])

            # Reconstruction
            recon_p_tr_x = brain_c.dbn_right.reconstruct(
                p_tr_x, k=10, plot_every=1, plot_n=100,
                img_name='{}_right_{}'.format(epoch, shape))
            recon = brain_c.recall(te_x, associate_steps=5, recall_steps=0,
                                   img_name='adbn_child_recon_{}'.format(shape))
            labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
            # write_evaluation(f, labels, proportion)

            errors = {}
            y_types = ['active_h', 'v_noisy_active_h', 'zero', 'binomial0.1']
            for y_type in y_types:
                errors[y_type] = {}
                for emo in xrange(len(kanade_loader.emotion_dict)):
                    errors[y_type][emo] = [proportion[emo]]

            for j in xrange(1):
                brain_c.fine_tune(tr_x, p_tr_x, epochs=1)
                recon_p_tr_x = brain_c.dbn_right.reconstruct(
                    p_tr_x, k=10, plot_every=1, plot_n=100,
                    img_name='{}_right_ft{}_{}'.format(epoch, j, shape))
                for y_type in y_types:
                    # Reconstruction
                    recon = brain_c.recall(
                        te_x, associate_steps=5, recall_steps=0,
                        img_name='{}_{}_ft{}_{}'.format(y_type, epoch, j, shape),
                        y_type=y_type)
                    labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
                    for i, l in enumerate(labels):
                        errors[y_type][i].append(proportion[i])
                print errors

            f.write('{}\n'.format(brain_c))
            for y_type in y_types:
                f.write('{}\n'.format(y_type))
                for emo in errors[y_type]:
                    f.write('{}:'.format(kanade_loader.emotion_rev_dict[emo + 1]))
                    for v in errors[y_type][emo]:
                        f.write('%.2f,' % v)
                    f.write('\n')
                f.write('\n')
    f.write('\n')
    f.close()
    data_manager.finish()
def experimentChild(project_name, mapping, shape, model):
    # Project set up
    data_manager = StorageManager(project_name, log=True)
    f = open(project_name + '.txt', mode='a')
    f.write(project_name)
    f.write('\tMODEL=')
    f.write(model)
    f.write('\tSHAPE=')
    f.write('%d' % shape)
    f.write('\n')

    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Get dataset
    happy_set = kanade_loader.load_kanade(set_name=dataset_name,
                                          emotions=mapping.keys(),
                                          pre=preprocessing)
    h_tr, h_vl, h_te = happy_set
    h_tr_x, h_tr_y = h_tr
    h_vl_x, h_vl_y = h_vl
    h_te_x, h_te_y = h_te

    # Sample Parent emotion
    p_tr_x, p_tr_y = kanade_loader.sample_image2(h_tr_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    concat1 = theano.function([], T.concatenate([h_tr_x, p_tr_x], axis=1))()
    tr_x = theano.shared(concat1, name='tr_x')

    c1 = T.concatenate([h_tr_x, p_tr_x], axis=1)
    c2 = T.concatenate([p_tr_x, h_tr_x], axis=1)
    c3 = theano.function([], T.concatenate([c1, c2], axis=0))()
    tr_x_mixed = theano.shared(c3, name='tr_x_mixed')

    # initial_y = np.zeros(h_te_x.get_value(True).shape)
    initial_y = np.random.normal(0, 1, h_te_x.get_value(True).shape)
    initial_y = theano.shared(initial_y, name='initial_y')
    te_x = theano.shared(theano.function([], T.concatenate(
        [h_te_x, initial_y], axis=1))().astype(t_float_x))

    if model == 'rbm':
        brain_c = get_brain_model_RBM(shape)
        load = data_manager.retrieve(str(brain_c))
        if load:
            brain_c = load
        else:
            brain_c.set_initial_hidden_bias()
            brain_c.set_hidden_mean_activity(tr_x)
            brain_c.train(tr_x)
            data_manager.persist(brain_c)
            # brain_c.train(tr_x_mixed)

        recon = brain_c.reconstruct_association_opt(
            h_te_x, initial_y, k=10, plot_n=100,
            img_name='rbm_child_recon_{}'.format(shape))
        recon_pair = brain_c.reconstruct(
            tr_x, k=1, plot_n=100,
            img_name='rbm_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]

    elif model == 'dbn':
        brain_c = get_brain_model_DBN(shape, data_manager)
        brain_c.pretrain(tr_x, cache=[True, True, True],
                         train_further=[True, True, True])
        recon_pair = brain_c.reconstruct(
            tr_x, k=1, plot_n=100,
            img_name='dbn_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]
        recon_pair = brain_c.reconstruct(
            te_x, k=1, plot_n=100,
            img_name='dbn_child_recon_{}'.format(shape))
        recon = recon_pair[:, (shape ** 2):]

    elif model == 'adbn':
        config = get_brain_model_AssociativeDBN(shape)
        brain_c = associative_dbn.AssociativeDBN(config)
        brain_c.train(h_tr_x, p_tr_x,
                      cache=[[True, True, True], [True, True, True], True],
                      train_further=[[True, True, True], [True, True, True], True])

        # Reconstruction
        recon_p_tr_x = brain_c.dbn_right.reconstruct(
            p_tr_x, k=10, plot_every=1, plot_n=100,
            img_name='adbn_right_recon_{}'.format(shape))
        recon = brain_c.recall(h_te_x, associate_steps=5, recall_steps=0,
                               img_name='adbn_child_recon_{}'.format(shape))

    labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
    write_evaluation(f, labels, proportion)

    f.write('\n')
    f.close()
    data_manager.finish()
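# evaluate() is called above but not defined in this file. A plausible sketch,
# reconstructed from the inline logic of the first experimentChild variant
# further up (train a logistic classifier on the parent reconstructions,
# classify the recalled children, histogram the predicted emotions); the real
# helper may differ.
def evaluate(p_tr_y, recon, recon_p_tr_x):
    clf = SimpleClassifier('logistic', recon_p_tr_x, p_tr_y.eval())
    res = clf.classify(recon).T
    r = np.histogram(res, bins=np.arange(1, 9))
    labels = map(lambda x: kanade_loader.emotion_rev_dict[int(x)], r[1][:-1])
    proportion = r[0] * 1. / sum(r[0])
    return labels, proportion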
def experiment_dbn(project_name, mapping, shape):
    # Project set up
    data_manager = StorageManager(project_name, log=True)
    f = open(project_name + '.txt', mode='a')
    f.write(project_name)
    f.write('%d' % shape)
    f.write('\n')

    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Get dataset
    happy_set = kanade_loader.load_kanade(set_name=dataset_name,
                                          emotions=mapping.keys(),
                                          pre=preprocessing,
                                          n=100)
    h_tr, h_vl, h_te = happy_set
    h_tr_x, h_tr_y = h_tr
    h_vl_x, h_vl_y = h_vl
    h_te_x, h_te_y = h_te

    # Sample Parent emotion
    p_tr_x, p_tr_y = kanade_loader.sample_image2(h_tr_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    concat1 = theano.function([], T.concatenate([h_tr_x, p_tr_x], axis=1))()
    tr_x = theano.shared(concat1, name='tr_x')

    # initial_y = np.zeros(h_te_x.get_value(True).shape)
    initial_y = np.random.normal(0, 1, h_te_x.get_value(True).shape)
    initial_y = theano.shared(initial_y, name='initial_y')
    te_x = theano.shared(theano.function([], T.concatenate(
        [h_te_x, initial_y], axis=1))().astype(t_float_x))

    configs = []
    jj = 0
    for lr1 in [0.0001]:  # , 0.001, 0.01]:
        for h_n1 in [500]:
            for h_n2 in [100, 250, 500]:
                for h_n3 in [100, 250, 500]:
                    config = get_brain_model_DBN(
                        shape,
                        data_manager=StorageManager(
                            '{}/{}'.format(project_name, jj), log=False))
                    config.rbm_configs[0].h_n = h_n1
                    config.rbm_configs[1].v_n = h_n1
                    config.rbm_configs[1].h_n = h_n2
                    config.rbm_configs[2].v_n = h_n2
                    config.rbm_configs[2].h_n = h_n3
                    config.topology = [25 * 25 * 2, h_n1, h_n2, h_n3]
                    config.rbm_configs[1].train_params.learning_rate = lr1
                    config.rbm_configs[2].train_params.learning_rate = lr1
                    configs.append(config)
                    jj += 1

    for epoch in xrange(10):
        for i, config in enumerate(configs):
            brain_c = DBN.DBN(config)
            brain_c.pretrain(tr_x, cache=[True, True, True],
                             train_further=[True, True, True])

            recon_pair = brain_c.reconstruct(
                tr_x, k=1, plot_n=100,
                img_name='{}_{}_recon_{}'.format(i, epoch, shape))
            recon_p_tr_x = recon_pair[:, (shape ** 2):]
            recon_pair = brain_c.reconstruct(
                te_x, k=1, plot_n=100,
                img_name='{}_{}_single_recon_{}'.format(i, epoch, shape))
            recon = recon_pair[:, (shape ** 2):]

            labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
            # write_evaluation(f, labels, proportion)

            errors = {}
            for emo in xrange(len(kanade_loader.emotion_dict)):
                errors[emo] = [proportion[emo]]

            for j in xrange(3):
                brain_c.fine_tune(tr_x, epochs=1)
                recon_pair = brain_c.reconstruct(
                    tr_x, k=1, plot_n=100,
                    img_name='{}_{}_recon_ft_{}'.format(i, epoch, shape))
                recon_p_tr_x = recon_pair[:, (shape ** 2):]
                recon_pair = brain_c.reconstruct(
                    te_x, k=1, plot_n=100,
                    img_name='{}_{}_single_recon_ft_{}'.format(i, epoch, shape))
                recon = recon_pair[:, (shape ** 2):]
                labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
                for k, l in enumerate(labels):
                    errors[k].append(proportion[k])
                print errors

            f.write('{}, {}\n'.format(i, brain_c))
            for emo in errors:
                f.write('{}:'.format(kanade_loader.emotion_rev_dict[emo + 1]))
                for v in errors[emo]:
                    f.write('%.2f,' % v)
                f.write('\n')
            f.write('\n')
    f.write('\n')
    f.close()
    data_manager.finish()
def KanadeAssociativeRBM(cache=False, train_further=False):
    print "Testing Associative RBM which tries to learn the ID map"
    # print "Testing Associative RBM which tries to learn the following mapping:
    #        {anger, sadness, disgust} -> {sadness},
    #        {contempt, happy, surprise} -> {happy}"

    # Project set-up
    data_manager = store.StorageManager('Kanade/OptMFSparse0.01RBMTest', log=True)
    # data_manager = store.StorageManager('Kanade/OptAssociativeRBMTest', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)

    # Load kanade database
    mapping = None  # id map
    # mapping = {'anger': 'sadness', 'contempt': 'happy', 'disgust': 'sadness',
    #            'fear': 'sadness', 'happy': 'happy', 'sadness': 'sadness',
    #            'surprise': 'happy'}
    train, valid, test = loader.load_kanade(pre={'scale': True},
                                            set_name=dataset_name)
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_mapped, train_y_mapped = loader.sample_image(train_y,
                                                         mapping=mapping,
                                                         pre={'scale': True},
                                                         set_name=dataset_name)
    test_x_mapped, test_y_mapped = loader.sample_image(test_y,
                                                       mapping=mapping,
                                                       pre={'scale': True},
                                                       set_name=dataset_name)

    # Concatenate images
    concat1 = T.concatenate([train_x, train_x_mapped], axis=1)
    # concat2 = T.concatenate([train_x_mapped, train_x], axis=1)
    # concat = T.concatenate([concat1, concat2], axis=0)
    # train_tX = theano.function([], concat)()
    train_tX = theano.function([], concat1)()
    train_X = theano.shared(train_tX)

    # Train classifier to be used for classifying reconstruction associated image layer
    # mapped_data = loader.load_kanade(#emotions=['sadness', 'happy'],
    #                                  pre={'scale': True},
    #                                  set_name=dataset_name)  # Target Image
    # clf_orig = SimpleClassifier('logistic', mapped_data[0][0], mapped_data[0][1])
    clf_orig = SimpleClassifier('logistic', train_x, train_y)

    # Initialise RBM
    tr = rbm_config.TrainParam(learning_rate=0.0001,
                               momentum_type=rbm_config.NESTEROV,
                               momentum=0.9,
                               weight_decay=0.0001,
                               sparsity_constraint=True,
                               sparsity_target=0.01,
                               sparsity_cost=100,
                               sparsity_decay=0.9,
                               batch_size=10,
                               epochs=10)

    n_visible = shape * shape * 2
    n_hidden = 500

    config = rbm_config.RBMConfig()
    config.v_n = n_visible
    config.h_n = n_hidden
    config.v_unit = rbm_units.GaussianVisibleUnit
    # config.h_unit = rbm_units.ReLUnit
    config.progress_logger = rbm_logger.ProgressLogger(img_shape=(shape * 2, shape))
    config.train_params = tr
    rbm = RBM(config)
    print "... initialised RBM"

    # Load RBM (test)
    loaded = data_manager.retrieve(str(rbm))
    if loaded:
        rbm = loaded
    else:
        rbm.set_initial_hidden_bias()
        rbm.set_hidden_mean_activity(train_X)

    # Train RBM - learn joint distribution
    # rbm.pretrain_lr(train_x, train_x01)
    for i in xrange(0, 10):
        if not cache or train_further:
            rbm.train(train_X)
        data_manager.persist(rbm)

        print "... reconstruction of associated images"

        # Get reconstruction with train data to get 'mapped' images to train classifiers on
        reconstruction = rbm.reconstruct(train_X, 1, plot_n=100, plot_every=1,
                                         img_name='recon_train')
        reconstruct_assoc_part = reconstruction[:, (shape ** 2):]

        # Get associated images of test data
        nsamples = np.random.normal(0, 1, test_x.get_value(True).shape).astype(np.float32)
        initial_y = theano.shared(nsamples, name='initial_y')
        utils.save_images(nsamples[0:100], 'initialisation.png', (10, 10), (25, 25))

        test_x_associated = rbm.reconstruct_association_opt(
            test_x, initial_y, 5, 0., plot_n=100, plot_every=1,
            img_name='recon_test_gibbs')
        mf_recon = rbm.mean_field_inference_opt(test_x, y=initial_y,
                                                sample=False, k=10,
                                                img_name='recon_test_mf_raw')

        # Concatenate images
        test_MFX = theano.function([], T.concatenate([test_x, mf_recon], axis=1))()
        test_MF = theano.shared(test_MFX)
        reconstruction = rbm.reconstruct(test_MF, 1, plot_n=100, plot_every=1,
                                         img_name='recon_test_mf_recon')
        mf_recon = reconstruction[:, (shape ** 2):]
        print "... reconstructed"

        # Classify the reconstructions
        # 1. Train classifier on original images
        score_orig = clf_orig.get_score(test_x_associated, test_y_mapped.eval())
        score_orig_mf = clf_orig.get_score(test_x_associated, test_y_mapped.eval())

        # 2. Train classifier on reconstructed images
        clf_recon = SimpleClassifier('logistic', reconstruct_assoc_part,
                                     train_y_mapped.eval())
        score_retrain = clf_recon.get_score(test_x_associated, test_y_mapped.eval())
        score_retrain_mf = clf_recon.get_score(mf_recon, test_y_mapped.eval())

        out_msg = '{} (orig, retrain):{},{}'.format(rbm, score_orig, score_retrain)
        out_msg2 = '{} (orig, retrain):{},{}'.format(rbm, score_orig_mf, score_retrain_mf)
        print out_msg
        print out_msg2
def associate_data2dataDBN(cache=False):
    print "Testing Associative DBN on the Kanade emotion dataset"

    # Project set-up
    data_manager = store.StorageManager('Kanade/associative_dbn_test', log=True)

    # Load Kanade faces
    dataset = loader.load_kanade(n=500,
                                 emotions=['anger', 'sadness', 'happy'],
                                 pre={'scale2unit': True})
    train_x, train_y = dataset
    train_x01 = loader.sample_image(train_y)
    dataset01 = loader.load_kanade(n=500)

    # Initialise RBM parameters
    # Fixed base train param
    base_tr = RBM.TrainParam(learning_rate=0.001,
                             momentum_type=RBM.CLASSICAL,
                             momentum=0.5,
                             weight_decay=0.0005,
                             sparsity_constraint=False,
                             epochs=20)

    # Top layer parameters
    tr = RBM.TrainParam(learning_rate=0.001,
                        # find_learning_rate=True,
                        momentum_type=RBM.NESTEROV,
                        momentum=0.5,
                        weight_decay=0.001,
                        sparsity_constraint=False,
                        epochs=20)

    tr_top = RBM.TrainParam(learning_rate=0.001,
                            # find_learning_rate=True,
                            momentum_type=RBM.CLASSICAL,
                            momentum=0.5,
                            weight_decay=0.001,
                            sparsity_constraint=False,
                            epochs=20)

    # Topology: layer 1, layer 2, layer 3
    # topology = [784, 500, 500, 100]
    config = associative_dbn.DefaultADBNConfig()
    config.topology_left = [625, 500, 500, 100]
    config.topology_right = [625, 500, 500, 100]
    config.reuse_dbn = False
    config.top_rbm_params = tr_top
    config.base_rbm_params = [base_tr, tr, tr]

    count = 0
    for cd_type in [RBM.CLASSICAL, RBM.PERSISTENT]:
        for n_ass in [100, 250, 500, 750, 1000]:
            config.n_association = n_ass
            config.top_cd_type = cd_type

            # Construct DBN
            ass_dbn = associative_dbn.AssociativeDBN(config=config,
                                                     data_manager=data_manager)

            # Train
            for trainN in xrange(0, 5):
                ass_dbn.train(train_x, train_x01, cache=cache)

                for n_recall in [1, 3, 10]:
                    for n_think in [0, 1, 3, 5, 10]:  # 1, 3, 5, 7, 10]:
                        # Reconstruct
                        sampled = ass_dbn.recall(train_x, n_recall, n_think)

                        # Sample from top layer to generate data
                        sample_n = 100
                        utils.save_images(
                            sampled,
                            image_name='{}_reconstructed_{}_{}_{}.png'.format(
                                count, n_ass, n_recall, n_think),
                            shape=(sample_n / 10, 10),
                            img_shape=(25, 25))
                        count += 1
def KanadeAssociativeDBN(cache=False):
    print "Testing Associative RBM which tries to learn the following mapping: " \
          "ID"
    # "{anger, sadness, disgust} -> {sadness}, {contempt, happy, surprise} -> {happy}"

    # Project set-up
    data_manager = store.StorageManager('Kanade/AssociativeDBNTest', log=True)
    shape = 25
    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Load kanade database
    mapping = None
    # mapping = {'anger': 'sadness',
    #            'contempt': 'happy',
    #            'disgust': 'sadness',
    #            'fear': 'sadness',
    #            'happy': 'happy',
    #            'sadness': 'sadness',
    #            'surprise': 'happy'}

    dataset = loader.load_kanade(n=100, pre=preprocessing,
                                 set_name=dataset_name)
    mapped_dataset = loader.load_kanade(n=100,
                                        # emotions=['sadness', 'happy'],
                                        pre=preprocessing,
                                        set_name=dataset_name)  # Target Image
    train, valid, test = dataset
    train_x, train_y = train
    test_x, test_y = test

    # Sample associated image
    train_x_ass, train_y_ass = loader.sample_image(train_y,
                                                   mapping=mapping,
                                                   pre=preprocessing,
                                                   set_name=dataset_name)
    test_x_ass, test_y_ass = loader.sample_image(test_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    # Initialise AssociativeDBN
    config = associative_dbn.DefaultADBNConfig()

    # Gaussian input layer
    bottom_tr = rbm_config.TrainParam(learning_rate=0.0001,
                                      momentum_type=rbm_config.NESTEROV,
                                      momentum=0.9,
                                      weight_decay=0.0001,
                                      epochs=20,
                                      batch_size=10)
    h_n = 150
    bottom_logger = rbm_logger.ProgressLogger(img_shape=(shape, shape))
    bottom_rbm = rbm_config.RBMConfig(v_unit=rbm_units.GaussianVisibleUnit,
                                      v_n=shape ** 2,
                                      h_n=h_n,
                                      progress_logger=bottom_logger,
                                      train_params=bottom_tr)

    config.left_dbn.rbm_configs[0] = bottom_rbm
    config.right_dbn.rbm_configs[0] = bottom_rbm
    config.left_dbn.topology = [shape ** 2, h_n]
    config.right_dbn.topology = [shape ** 2, h_n]
    config.top_rbm.train_params.epochs = 20
    config.top_rbm.train_params.batch_size = 10
    config.n_association = 1000
    config.reuse_dbn = True
    adbn = associative_dbn.AssociativeDBN(config=config,
                                          data_manager=data_manager)

    # Plot sample
    loader.save_faces(train_x.get_value(borrow=True)[1:50],
                      tile=(10, 10),
                      img_name='n_orig.png')
    loader.save_faces(train_x_ass.get_value(borrow=True)[1:50],
                      tile=(10, 10),
                      img_name='n_ass.png')

    # Train classifier to be used for classifying reconstruction associated image layer
    clf_orig = SimpleClassifier('knn', mapped_dataset[0][0], mapped_dataset[0][1])

    # Test DBN performance
    for i in xrange(0, 5):
        # Train DBN - learn joint distribution
        cache_left = [True]
        cache_right = [True]
        cache_top = False
        cache = [cache_left, cache_right, cache_top]
        adbn.train(train_x, train_x_ass, cache=cache)
        print "... trained associative DBN"

        # Reconstruct images
        test_x_recon = adbn.recall(test_x, associate_steps=500, recall_steps=0)
        print "... reconstructed images"

        # Classify the reconstructions
        # 1. Train classifier on original images
        score_orig = clf_orig.get_score(test_x_recon, test_y_ass.eval())

        # 2. Train classifier on reconstructed images - reconstruction obtained by right dbn
        right_dbn = adbn.dbn_right
        mapped_train_recon = right_dbn.reconstruct(
            mapped_dataset[0][0], k=1, plot_n=100, plot_every=1,
            img_name='right_dbn_reconstruction')
        clf_recon = SimpleClassifier('knn', mapped_train_recon,
                                     mapped_dataset[0][1].eval())
        score_retrain = clf_recon.get_score(test_x_recon, test_y_ass.eval())

        out_msg = '{} (orig, retrain):{},{}'.format(adbn, score_orig, score_retrain)
        print out_msg
def test_kanades_grrbm(self):
    nvis = 625
    nhid = 1000
    train_n = 10000
    batch_n = 20

    tr = rbm_config.TrainParam(learning_rate=0.05,
                               momentum_type=rbm_config.CLASSICAL,
                               momentum=0.5,
                               weight_decay=0,
                               sparsity_constraint=False,
                               sparsity_target=0.1 ** 9,
                               sparsity_cost=10 ** 8,
                               sparsity_decay=0.9,
                               epochs=5,
                               batch_size=batch_n)

    config = rbm_config.RBMConfig()
    config.v_n = nvis
    config.h_n = nhid
    config.v_unit = rbm_units.GaussianVisibleUnit
    config.h_unit = rbm_units.ReLUnit
    config.progress_logger = rbm_logger.ProgressLogger()
    config.train_params = tr

    np_rand = np.random.RandomState(123)

    # Weights
    W = np_rand.normal(0, 0.01, size=(nvis, nhid)).astype(np.float32)
    vb = np.zeros(nvis, dtype=np.float32)
    hb = np.zeros(nhid, dtype=np.float32)
    Wt = theano.shared(W, name='W')
    vbt = theano.shared(vb, name='vbias')
    hbt = theano.shared(hb, name='hbias')

    g_rbm = rbm.RBM(config, W=Wt, h_bias=hbt, v_bias=vbt)
    self.assertTrue(g_rbm)
    self.assertTrue(isinstance(g_rbm.v_unit, rbm_units.GaussianVisibleUnit))
    self.assertTrue(isinstance(g_rbm.h_unit, rbm_units.RBMUnit))
    self.assertTrue(np.count_nonzero(g_rbm.W.get_value(borrow=True) - W) == 0)
    self.assertTrue(np.count_nonzero(g_rbm.v_bias.get_value(borrow=True) - vb) == 0)
    self.assertTrue(np.count_nonzero(g_rbm.h_bias.get_value(borrow=True) - hb) == 0)

    tr, vl, te = k_loader.load_kanade(n=batch_n, pre={'scale': True})
    v = tr[0]
    vv = v.get_value(borrow=True).ravel()
    table = ss.itemfreq(vv)
    print table
    x = [pt[0] for pt in table]
    y = [pt[1] for pt in table]
    plt.plot(x, y)
    plt.show()

    # v = theano.shared(x)
    h = g_rbm.h_unit.activate(g_rbm.h_unit.scale(T.dot(v, W) + hb))
    _, _, h = g_rbm.sample_h_given_v(v)
    _, _, vs = g_rbm.sample_v_given_h(h)
    _, _, hs = g_rbm.sample_h_given_v(vs)

    # Hand-unrolled CD-1 gradients
    dw = (T.dot(v.T, h) - T.dot(vs.T, hs)) / batch_n
    dv = T.mean(v - vs, axis=0)
    dh = T.mean(h - hs, axis=0)

    gr = g_rbm.get_partial_derivatives(v, None)['gradients']
    gdw, gdv, gdh = gr[0], gr[1], gr[2]
    print gdw, gdv, gdh

    compute_derivative = theano.function([], [dw, dv, dh, gdw, gdv, gdh])
    for i in xrange(1):
        a, b, c, d, e, f = compute_derivative()
        # print a, b, c
        print 'unfold'
        print a[0][0:5], b[1:5], c[1:5]
        print 'rbm'
        print d[0][0:5], e[1:5], f[1:5]
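# The derivative check above compares the RBM's get_partial_derivatives()
# against hand-unrolled CD-1 estimates. A self-contained NumPy sketch of those
# estimates (mean-field activations, no sampling; illustrative only, not this
# project's API):
import numpy as np

def cd1_gradients(v, W, vb, hb):
    sigmoid = lambda a: 1.0 / (1.0 + np.exp(-a))
    h = sigmoid(np.dot(v, W) + hb)        # positive-phase hidden activations
    vs = np.dot(h, W.T) + vb              # Gaussian visible reconstruction (mean)
    hs = sigmoid(np.dot(vs, W) + hb)      # negative-phase hidden activations
    n = v.shape[0]
    dW = (np.dot(v.T, h) - np.dot(vs.T, hs)) / n   # weight gradient
    dv = np.mean(v - vs, axis=0)                   # visible bias gradient
    dh = np.mean(h - hs, axis=0)                   # hidden bias gradient
    return dW, dv, dh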
def test_load_n(self):
    dataset = loader.load_kanade(shared=False, set_name='50_50', n=100)
    self.assertTrue(len(dataset[0]) == 100)
def associate_data2dataDBN(cache=False):
    print "Testing Associative DBN which learns associations between Kanade face images"

    # Project set-up
    data_manager = store.StorageManager('Kanade/associative_dbn_test', log=True)

    # Load Kanade face images; class labels are already set
    dataset = loader.load_kanade(n=500,
                                 emotions=['anger', 'sadness', 'happy'],
                                 pre={'scale2unit': True})
    train_x, train_y = dataset
    train_x01 = loader.sample_image(train_y)
    dataset01 = loader.load_kanade(n=500)

    # Initialise RBM parameters
    # Fixed base train param
    base_tr = RBM.TrainParam(learning_rate=0.001,
                             momentum_type=RBM.CLASSICAL,
                             momentum=0.5,
                             weight_decay=0.0005,
                             sparsity_constraint=False,
                             epochs=20)

    # Higher base-layer parameters
    tr = RBM.TrainParam(learning_rate=0.001,
                        # find_learning_rate=True,
                        momentum_type=RBM.NESTEROV,
                        momentum=0.5,
                        weight_decay=0.001,
                        sparsity_constraint=False,
                        epochs=20)

    # Top (associative) layer parameters
    tr_top = RBM.TrainParam(learning_rate=0.001,
                            # find_learning_rate=True,
                            momentum_type=RBM.CLASSICAL,
                            momentum=0.5,
                            weight_decay=0.001,
                            sparsity_constraint=False,
                            epochs=20)

    # topology = [784, 500, 500, 100]
    config = associative_dbn.DefaultADBNConfig()
    config.topology_left = [625, 500, 500, 100]
    config.topology_right = [625, 500, 500, 100]
    config.reuse_dbn = False
    config.top_rbm_params = tr_top
    config.base_rbm_params = [base_tr, tr, tr]  # layers 1, 2, 3

    count = 0
    for cd_type in [RBM.CLASSICAL, RBM.PERSISTENT]:
        for n_ass in [100, 250, 500, 750, 1000]:
            config.n_association = n_ass
            config.top_cd_type = cd_type

            # Construct DBN
            ass_dbn = associative_dbn.AssociativeDBN(config=config,
                                                     data_manager=data_manager)

            # Train
            for trainN in xrange(0, 5):
                ass_dbn.train(train_x, train_x01, cache=cache)

                for n_recall in [1, 3, 10]:
                    for n_think in [0, 1, 3, 5, 10]:  # 1, 3, 5, 7, 10]:
                        # Reconstruct by sampling from the top layer
                        sampled = ass_dbn.recall(train_x, n_recall, n_think)

                        sample_n = 100
                        utils.save_images(sampled,
                                          image_name='{}_reconstructed_{}_{}_{}.png'.format(count, n_ass, n_recall, n_think),
                                          shape=(sample_n / 10, 10),
                                          img_shape=(25, 25))
                        count += 1
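# A hedged sketch of what loader.sample_image is assumed to do above: for each
# label in train_y, draw a random training image carrying that same label, so
# the associative DBN is trained on pairs of same-class images. The helper
# below is hypothetical and only illustrates that pairing.
import numpy as np

def sample_images_by_label(labels, images, image_labels, rng=np.random):
    paired = []
    for lbl in labels:
        candidates = np.where(image_labels == lbl)[0]
        paired.append(images[rng.choice(candidates)])
    return np.asarray(paired)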
def experimentChild(project_name, mapping, shape, model):
    # Project set up
    data_manager = StorageManager(project_name, log=True)
    f = open(project_name + '.txt', mode='a')
    f.write(project_name)
    f.write('\tMODEL=')
    f.write(model)
    f.write('\tSHAPE=')
    f.write('%d' % shape)
    f.write('\n')

    dataset_name = 'sharp_equi{}_{}'.format(shape, shape)
    preprocessing = {'scale': True}

    # Get dataset
    happy_set = kanade_loader.load_kanade(set_name=dataset_name,
                                          emotions=mapping.keys(),
                                          pre=preprocessing)
    h_tr, h_vl, h_te = happy_set
    h_tr_x, h_tr_y = h_tr
    h_vl_x, h_vl_y = h_vl
    h_te_x, h_te_y = h_te

    # Sample a parent emotion image for each child image
    p_tr_x, p_tr_y = kanade_loader.sample_image2(h_tr_y,
                                                 mapping=mapping,
                                                 pre=preprocessing,
                                                 set_name=dataset_name)

    # Child-parent pairs, plus a mixed set with both orderings
    concat1 = theano.function([], T.concatenate([h_tr_x, p_tr_x], axis=1))()
    tr_x = theano.shared(concat1, name='tr_x')
    c1 = T.concatenate([h_tr_x, p_tr_x], axis=1)
    c2 = T.concatenate([p_tr_x, h_tr_x], axis=1)
    c3 = theano.function([], T.concatenate([c1, c2], axis=0))()
    tr_x_mixed = theano.shared(c3, name='tr_x_mixed')

    # initial_y = np.zeros(h_te_x.get_value(True).shape)
    initial_y = np.random.normal(0, 1, h_te_x.get_value(True).shape).astype(t_float_x)
    initial_y = theano.shared(initial_y, name='initial_y')
    te_x = theano.shared(theano.function([], T.concatenate([h_te_x, initial_y], axis=1))().astype(t_float_x))

    if model == 'rbm':
        brain_c = get_brain_model_RBM(shape)
        load = data_manager.retrieve(str(brain_c))
        if load:
            brain_c = load
        else:
            brain_c.set_initial_hidden_bias()
            brain_c.set_hidden_mean_activity(tr_x)
            brain_c.train(tr_x)
            data_manager.persist(brain_c)
        # brain_c.train(tr_x_mixed)

        recon = brain_c.reconstruct_association_opt(h_te_x, initial_y,
                                                    k=10,
                                                    plot_n=100,
                                                    img_name='rbm_child_recon_{}'.format(shape))
        recon_pair = brain_c.reconstruct(tr_x,
                                         k=1,
                                         plot_n=100,
                                         img_name='rbm_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]

    elif model == 'dbn':
        brain_c = get_brain_model_DBN(shape, data_manager)
        brain_c.pretrain(tr_x,
                         cache=[True, True, True],
                         train_further=[True, True, True])
        recon_pair = brain_c.reconstruct(tr_x,
                                         k=1,
                                         plot_n=100,
                                         img_name='dbn_pair_recon_{}'.format(shape))
        recon_p_tr_x = recon_pair[:, (shape ** 2):]
        recon_pair = brain_c.reconstruct(te_x,
                                         k=1,
                                         plot_n=100,
                                         img_name='dbn_child_recon_{}'.format(shape))
        recon = recon_pair[:, (shape ** 2):]

    elif model == 'adbn':
        config = get_brain_model_AssociativeDBN(shape)
        brain_c = associative_dbn.AssociativeDBN(config)
        brain_c.train(h_tr_x, p_tr_x,
                      cache=[[True, True, True], [True, True, True], True],
                      train_further=[[True, True, True], [True, True, True], True])

        # Reconstruction
        recon_p_tr_x = brain_c.dbn_right.reconstruct(p_tr_x,
                                                     k=10,
                                                     plot_every=1,
                                                     plot_n=100,
                                                     img_name='adbn_right_recon_{}'.format(shape))
        recon = brain_c.recall(h_te_x,
                               associate_steps=5,
                               recall_steps=0,
                               img_name='adbn_child_recon_{}'.format(shape))

    labels, proportion = evaluate(p_tr_y, recon, recon_p_tr_x)
    write_evaluation(f, labels, proportion)
    f.write('\n')
    f.close()
    data_manager.finish()
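# A possible invocation, assuming mapping maps each child emotion to a
# distribution over parent emotions (the exact structure sample_image2
# expects is not shown here, so this call is illustrative only):
#
#   mapping = {'happy': {'happy': 0.8, 'sadness': 0.2}}
#   experimentChild('ExperimentChild', mapping, 25, 'rbm')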