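#-----------------------------------------------------------------------------
# Illustrative note (an assumption about the encoding layout, not taken from
# this repository's code): encode_dataset() is assumed to return, for every
# input image, the VAE posterior mean and log-variance concatenated as
# [mu, log_sigma^2], with latent dimensions whose std falls below min_std
# already removed. Under that assumption the usable latent width is half the
# encoded width, which is why main() below sets dim_x = data_lab.shape[1] // 2.
# The small helper is a sketch of that arithmetic only; it is not called by
# the training code.
#-----------------------------------------------------------------------------
def split_encoded_moments(encoded):
    """Split a [mu, log_sigma^2] encoding into its (mu, log_var) halves."""
    dim_x = encoded.shape[1] // 2           # latent width is half the encoded width
    return encoded[:, :dim_x], encoded[:, dim_x:]
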
def main(flags, data):

    #############################
    ''' Experiment Parameters '''
    #############################

    num_lab_ratio = flags.labeled
    num_batches = 100       #Number of minibatches in a single epoch
    dim_z = 50              #Dimensionality of latent variable (z)
    epochs = flags.epochs   #Number of epochs through the full dataset
    learning_rate = 3e-4    #Learning rate of ADAM
    alpha = 0.1             #Discriminatory factor (see equation (9) of http://arxiv.org/pdf/1406.5298v2.pdf)
    seed = 31415            #Seed for RNG

    #Neural networks parameterising p(x|z,y), q(z|x,y) and q(y|x)
    hidden_layers_px = [500]
    hidden_layers_qz = [500]
    hidden_layers_qy = [500]

    ####################
    ''' Load Dataset '''
    ####################

    #Uses the anglepy module from the original paper (linked at top) to split the dataset for semi-supervised training
    train_x, train_y, valid_x, valid_y, test_x, test_y = data.load_numpy_split(binarize_y=True)
    x_l, y_l, x_u, y_u, num_lab = data.create_semisupervised(train_x, train_y, num_lab_ratio)

    x_lab, y_lab = x_l.T, y_l.T
    x_ulab, y_ulab = x_u.T, y_u.T
    x_valid, y_valid = valid_x.T, valid_y.T
    x_test, y_test = test_x.T, test_y.T

    ################
    ''' Load VAE '''
    ################

    VAE_model_path = flags.vaemodel
    min_std = 0.1   #Dimensions with std < min_std are removed before training with GC

    data_lab, data_ulab, data_valid, data_test = encode_dataset(VAE_model_path,
                                                                x_lab, x_ulab, x_valid, x_test,
                                                                min_std)

    dim_x = data_lab.shape[1] // 2
    dim_y = y_lab.shape[1]
    num_examples = data_lab.shape[0] + data_ulab.shape[0]

    ###################################
    ''' Train Generative Classifier '''
    ###################################

    GC = GenerativeClassifier(dim_x, dim_z, dim_y, num_examples, num_lab, num_batches,
                              hidden_layers_px=hidden_layers_px,
                              hidden_layers_qz=hidden_layers_qz,
                              hidden_layers_qy=hidden_layers_qy,
                              alpha=alpha)

    GC.train(x_labelled=data_lab, y=y_lab, x_unlabelled=data_ulab,
             x_valid=data_valid, y_valid=y_valid,
             epochs=epochs, learning_rate=learning_rate,
             seed=seed, print_every=10, load_path=None)

    ############################
    ''' Evaluate on Test Set '''
    ############################

    GC_eval = GenerativeClassifier(dim_x, dim_z, dim_y, num_examples, num_lab, num_batches)

    with GC_eval.session:
        GC_eval.saver.restore(GC_eval.session, GC.save_path)
        return GC_eval.predict_labels(data_test, y_test)
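
#-----------------------------------------------------------------------------
# Hypothetical command-line wiring (a sketch, not this repository's actual
# entry point): it exposes the three attributes main() reads from `flags`
# (labeled, epochs, vaemodel) as argparse options and leaves the data loader
# abstract. The default checkpoint name mirrors the VAE path used elsewhere
# in this file; the other values and the loader object are placeholders.
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Train the generative classifier on VAE-encoded data')
    parser.add_argument('--labeled', type=float, required=True,
                        help='labelled-data setting forwarded to create_semisupervised() '
                             '(a fraction or a count, depending on that helper)')
    parser.add_argument('--epochs', type=int, required=True,
                        help='number of passes through the full dataset')
    parser.add_argument('--vaemodel', type=str,
                        default='models/VAE_600-600-0.0003-50.cpkt',
                        help='checkpoint of the pretrained VAE used to encode the data')
    flags = parser.parse_args()

    # Replace `data_loader` with whichever module/object in this repository
    # provides load_numpy_split() and create_semisupervised() (the anglepy-based
    # MNIST helpers referenced in main()); it is deliberately left abstract here.
    # main(flags, data_loader)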