assert FLAGS.dataset in ['mnist', 'cifar10', 'custom']
assert FLAGS.opt in ['gradient_descent', 'ada_grad', 'momentum', 'adam']
assert FLAGS.loss_func in ['mean_squared', 'softmax_cross_entropy']

if __name__ == '__main__':

    utilities.random_seed_np_tf(FLAGS.seed)

    if FLAGS.dataset == 'mnist':

        # ################# #
        #   MNIST Dataset   #
        # ################# #

        trX, trY, vlX, vlY, teX, teY = datasets.load_mnist_dataset(
            mode='supervised')

    elif FLAGS.dataset == 'cifar10':

        # ################### #
        #   Cifar10 Dataset   #
        # ################### #

        trX, trY, teX, teY = datasets.load_cifar10_dataset(
            FLAGS.cifar_dir, mode='supervised')
        # Validation set is the first half of the test set
        vlX = teX[:5000]
        vlY = teY[:5000]

    elif FLAGS.dataset == 'custom':

        # ################## #
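# --------------------------------------------------------------------------
# Hedged sketch (not part of the original script): one way the truncated
# 'custom' branch above could load a user-provided, pre-split dataset.
# The loader name, the .npy layout, and the idea of flag-supplied paths are
# assumptions made for illustration; the real script may use different
# flags and a different on-disk format.
# --------------------------------------------------------------------------
import numpy as np


def load_custom_supervised(train_path, valid_path, test_path):
    """Load .npy arrays shaped (n_samples, n_features + 1), where the
    last column holds the integer class label, and return the six splits."""
    def split_xy(arr):
        return arr[:, :-1].astype(np.float32), arr[:, -1].astype(np.int64)

    trX, trY = split_xy(np.load(train_path))
    vlX, vlY = split_xy(np.load(valid_path))
    teX, teY = split_xy(np.load(test_path))
    return trX, trY, vlX, vlY, teX, teY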
# Parameters validation
assert FLAGS.dataset in ['mnist', 'cifar10', 'custom']
assert len(rbm_layers) > 0

if __name__ == '__main__':

    utilities.random_seed_np_tf(FLAGS.seed)

    if FLAGS.dataset == 'mnist':

        # ################# #
        #   MNIST Dataset   #
        # ################# #

        trX, vlX, teX = datasets.load_mnist_dataset(mode='unsupervised')
        trRef = trX
        vlRef = vlX
        teRef = teX

    elif FLAGS.dataset == 'cifar10':

        # ################### #
        #   Cifar10 Dataset   #
        # ################### #

        trX, teX = datasets.load_cifar10_dataset(
            FLAGS.cifar_dir, mode='unsupervised')
        # Validation set is the first half of the test set
        vlX = teX[:5000]
        trRef = trX
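# --------------------------------------------------------------------------
# Hedged sketch (not part of the original script): trRef/vlRef/teRef keep a
# clean copy of the data to serve as reconstruction targets. If the inputs
# are corrupted for a denoising setup, only trX/vlX/teX would be touched,
# e.g. with simple masking noise as below. The helper name and corruption
# fraction are assumptions made for illustration.
# --------------------------------------------------------------------------
import numpy as np


def masking_noise(data, corruption_fraction, seed=None):
    """Return a copy of `data` with a random fraction of entries set to 0."""
    rng = np.random.default_rng(seed)
    corrupted = np.array(data, copy=True)
    mask = rng.random(corrupted.shape) < corruption_fraction
    corrupted[mask] = 0.0
    return corrupted

# A hypothetical training call could then pair the corrupted inputs with the
# clean references, e.g. model.fit(masking_noise(trX, 0.3), trRef, ...).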
assert FLAGS.dataset in ['mnist', 'cifar10', 'custom']
assert FLAGS.cifar_dir != '' if FLAGS.dataset == 'cifar10' else True
assert FLAGS.visible_unit_type in ['bin', 'gauss']

if __name__ == '__main__':

    utilities.random_seed_np_tf(FLAGS.seed)

    if FLAGS.dataset == 'mnist':

        # ################# #
        #   MNIST Dataset   #
        # ################# #

        trX, vlX, teX = datasets.load_mnist_dataset(mode='unsupervised')
        width, height = 28, 28

    elif FLAGS.dataset == 'cifar10':

        # ################### #
        #   Cifar10 Dataset   #
        # ################### #

        trX, teX = datasets.load_cifar10_dataset(
            FLAGS.cifar_dir, mode='unsupervised')
        # Validation set is the first half of the test set
        vlX = teX[:5000]
        width, height = 32, 32

    elif FLAGS.dataset == 'custom':

        # ################## #
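# --------------------------------------------------------------------------
# Hedged sketch (not part of the original script): width/height are set so
# that flat pixel vectors can be reshaped back into images, e.g. to inspect
# RBM reconstructions. The grayscale reshape below assumes single-channel
# data such as MNIST; CIFAR-10 vectors are 32*32*3 long and would need a
# (32, 32, 3) reshape instead.
# --------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt


def show_image(flat_vector, width, height, title=''):
    """Reshape a flat single-channel pixel vector to (height, width) and plot it."""
    plt.imshow(np.asarray(flat_vector).reshape(height, width), cmap='gray')
    plt.title(title)
    plt.axis('off')
    plt.show()

# Example: show_image(teX[0], width, height, title='test sample 0')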
def generate_encodings():
    """Generates encodings for MNIST dataset."""
    train_images, train_labels, validation_images, validation_labels, \
        test_images, test_labels = datasets.load_mnist_dataset(
            mode='supervised')

    # Convert one-hot to integer
    train_labels = [np.argmax(label) for label in train_labels]
    test_labels = [np.argmax(label) for label in test_labels]

    # Initialize the autoencoder
    autoencoder = Autoencoder([2048, 1024, 256, 128],
                              pretrain=False,
                              pretrain_epochs=0,
                              finetune_epochs=120,
                              finetune_batch_size=64)
    autoencoder.train(train_images)

    autoencoder.generate_encodings(
        train_images, train_labels,
        save_to_path='../data/mnist_train_encodings_6')
    autoencoder.generate_encodings(
        test_images, test_labels,
        save_to_path='../data/mnist_test_encodings_6')
# Parameters validation
assert FLAGS.dataset in ['mnist', 'cifar10', 'custom']
assert FLAGS.finetune_act_func in ['sigmoid', 'tanh', 'relu']
assert len(rbm_layers) > 0

if __name__ == '__main__':

    utilities.random_seed_np_tf(FLAGS.seed)

    if FLAGS.dataset == 'mnist':

        # ################# #
        #   MNIST Dataset   #
        # ################# #

        trX, trY, vlX, vlY, teX, teY = datasets.load_mnist_dataset(
            mode='supervised')

    elif FLAGS.dataset == 'cifar10':

        # ################### #
        #   Cifar10 Dataset   #
        # ################### #

        trX, trY, teX, teY = datasets.load_cifar10_dataset(
            FLAGS.cifar_dir, mode='supervised')
        # Validation set is the first half of the test set
        vlX = teX[:5000]
        vlY = teY[:5000]

    elif FLAGS.dataset == 'custom':

        # ################## #
        #   Custom Dataset   #
def generate_encodings():
    """Generates encodings for MNIST dataset."""
    train_images, train_labels, validation_images, validation_labels, \
        test_images, test_labels = datasets.load_mnist_dataset(
            mode='supervised')

    # Convert one-hot to integer
    train_labels = [np.argmax(label) for label in train_labels]
    test_labels = [np.argmax(label) for label in test_labels]

    # Initialize the autoencoder
    autoencoder = Autoencoder([2048, 1024, 256, 64],
                              pretrain=False,
                              pretrain_epochs=0,
                              finetune_epochs=120,
                              finetune_batch_size=64)
    autoencoder.train(train_images)

    autoencoder.generate_encodings(
        train_images, train_labels,
        save_to_path='../data/mnist_train_encodings_3')
    autoencoder.generate_encodings(
        test_images, test_labels,
        save_to_path='../data/mnist_test_encodings_3')
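# --------------------------------------------------------------------------
# Hedged sketch (not part of the original script): how the saved encodings
# could be reloaded for a downstream classifier. The on-disk format is an
# assumption; this sketch supposes NumPy .npz archives with 'encodings' and
# 'labels' arrays, which may differ from what generate_encodings() actually
# writes to save_to_path.
# --------------------------------------------------------------------------
import numpy as np


def load_encodings(path):
    """Load an encodings archive and return (features, labels)."""
    archive = np.load(path, allow_pickle=True)
    return archive['encodings'], archive['labels']

# Example (hypothetical path and keys):
# train_enc, train_lbl = load_encodings('../data/mnist_train_encodings_3.npz')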