Data = SSL_DATA(data.x_unlabeled, data.y_unlabeled,
                x_test=data.x_test, y_test=data.y_test,
                x_labeled=data.x_labeled, y_labeled=data.y_labeled,
                dataset='mnist', seed=seed)
n_x, n_y = Data.INPUT_DIM, Data.NUM_CLASSES

if modelName == 'm2':
    # standard M2: Kingma et al. (2014)
    model = m2(n_x, n_y, n_z, n_hidden, x_dist=x_dist, batchnorm=batchnorm,
               mc_samples=mc_samps, l2_reg=l2_reg)
elif modelName == 'adgm':
    # auxiliary DGM: Maaloe et al. (2016)
    model = adgm(n_x, n_y, n_z, n_a, n_hidden, x_dist=x_dist, alpha=alpha,
                 batchnorm=batchnorm, mc_samples=mc_samps, l2_reg=l2_reg)
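# The snippet above assumes `data` already carries a labeled/unlabeled split of
# MNIST. As a minimal sketch (not the repo's own loader), such a split can be
# built with plain numpy by keeping a fixed number of examples per class as
# labeled and treating the rest as unlabeled; the function name and arguments
# below are illustrative only.
import numpy as np


def make_ssl_split(x, y, num_labeled_per_class, num_classes=10, seed=0):
    """Split (x, one-hot y) into a small labeled set and a large unlabeled set."""
    rng = np.random.RandomState(seed)
    labeled_idx = []
    for c in range(num_classes):
        idx_c = np.where(y.argmax(1) == c)[0]
        labeled_idx.append(rng.choice(idx_c, num_labeled_per_class, replace=False))
    labeled_idx = np.concatenate(labeled_idx)
    unlabeled_idx = np.setdiff1d(np.arange(len(x)), labeled_idx)
    return (x[labeled_idx], y[labeled_idx]), (x[unlabeled_idx], y[unlabeled_idx])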
import os

import numpy as np
import tensorflow as tf
from sklearn.metrics import confusion_matrix

mc_samps = 1
eval_samps = 1000
verbose = 3

Data.reset_counters()
results = []
for i in range(num_runs):
    print("Starting work on run: {}".format(i))
    Data.reset_counters()
    np.random.seed(2)
    tf.set_random_seed(2)
    tf.reset_default_graph()
    model_token = token + '-' + str(i) + '---'

    if model_name == 'm2':
        model = m2(n_x, n_y, n_z, n_hidden, x_dist=x_dist, batchnorm=batchnorm,
                   mc_samples=mc_samps, l2_reg=l2_reg,
                   learning_paradigm=learning_paradigm, name=model_token,
                   ckpt=model_token)
    if model_name == 'gm_dgm':
        model = gm_dgm(n_x, n_y, n_z, n_hidden, x_dist=x_dist, batchnorm=batchnorm,
                       alpha=alpha, mc_samples=mc_samps, l2_reg=l2_reg,
                       learning_paradigm=learning_paradigm, name=model_token,
                       ckpt=model_token,
                       prior=prior[0:n_y] / float(sum(prior[0:n_y])),
                       loss_ratio=loss_ratio, output_dir=output_dir)

    # select the objective to optimise for the chosen learning paradigm
    if learning_paradigm in ('semisupervised', 'semi-unsupervised'):
        model.loss = model.compute_loss()
    elif learning_paradigm == 'unsupervised':
        model.loss = model.compute_unsupervised_loss()
    elif learning_paradigm == 'supervised':
        model.loss = model.compute_supervised_loss()

    model.train(Data, n_epochs, l_bs, u_bs, lr, eval_samps=eval_samps,
                binarize=binarize, verbose=1)
    results.append(model.curve_array)
    np.save(os.path.join(output_dir, 'curve_' + token + '_' + str(i) + '.npy'),
            model.curve_array)

    # evaluate the trained model on the held-out test set
    y_pred_test = model.predict_new(Data.data['x_test'])[0]
    conf_mat = confusion_matrix(Data.data['y_test'].argmax(1),
                                y_pred_test.argmax(1))
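# After all runs complete, the per-run learning curves saved above can be
# aggregated for reporting. This is a minimal sketch assuming each
# `curve_array` is a numpy array with the same shape in every run; the output
# filenames and variable names here are illustrative, not part of the script.
results_arr = np.stack(results)        # (num_runs, ...) stacked learning curves
mean_curve = results_arr.mean(axis=0)  # average learning curve across runs
std_curve = results_arr.std(axis=0)    # run-to-run variability
np.save(os.path.join(output_dir, 'curve_' + token + '_mean.npy'), mean_curve)
np.save(os.path.join(output_dir, 'curve_' + token + '_std.npy'), std_curve)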