def train_stacked_autoencoder(X_train, X_test):
    model = StackedAutoEncoder()
    # TensorFlow needs to see the graph before initializing the variables
    # used in the computation.
    model.build_graph()

    config = tf.ConfigProto(intra_op_parallelism_threads=_PROCESSORS,
                            inter_op_parallelism_threads=_PROCESSORS)
    sess = tf.Session(config=config)
    sess.run(tf.global_variables_initializer())

    print("Training Stacked Autoencoder...")
    for epoch in range(FLAGS.epochs * FLAGS.batch_size):
        batch_x = next_batch(FLAGS.batch_size, X_train)
        train_dict = {
            model.x: batch_x,
            model.learning_rate_stacked: FLAGS.learning_rate_stacked,
            model.reg_stacked: FLAGS.reg_stacked,
            model.noise: FLAGS.noise,
            model.fraction: FLAGS.fraction
        }
        sess.run(model.optimizer, feed_dict=train_dict)
        if epoch % 10 == 0:
            c = sess.run([model.loss], feed_dict=train_dict)
            print("Train Loss: ", c)

    # Compute the encoded version of X_train and X_test.
    X_train_dict = {
        model.x: X_train,
        model.learning_rate_stacked: FLAGS.learning_rate_stacked,
        model.reg_stacked: FLAGS.reg_stacked,
        model.noise: FLAGS.noise,
        model.fraction: FLAGS.fraction
    }
    X_test_dict = {
        model.x: X_test,
        model.learning_rate_stacked: FLAGS.learning_rate_stacked,
        model.reg_stacked: FLAGS.reg_stacked,
        model.noise: FLAGS.noise,
        model.fraction: FLAGS.fraction
    }
    X_train = np.asarray(sess.run([model.x_encoded], feed_dict=X_train_dict))
    X_test = np.asarray(sess.run([model.x_encoded], feed_dict=X_test_dict))
    X_train = X_train.reshape(-1, 30)
    X_test = X_test.reshape(-1, 30)

    # Close the session to avoid conflicts with the test phase.
    sess.close()
    return X_train, X_test
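# NOTE: `next_batch` is called above but not defined in this snippet. A minimal
# sketch, assuming it draws a random mini-batch (without replacement) from the
# rows of the training matrix; the sampling strategy is an assumption, not
# necessarily the repository's actual helper.
import numpy as np

def next_batch(batch_size, data):
    """Return `batch_size` randomly chosen rows from `data` (assumed 2-D array)."""
    idx = np.random.choice(data.shape[0], size=batch_size, replace=False)
    return data[idx]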
# MNIST digits dataset
dataset = torchvision.datasets.MNIST(
    root='../mnist/',
    train=True,                                   # this is the training split
    transform=torchvision.transforms.ToTensor(),  # converts a PIL.Image or numpy.ndarray to a
                                                  # torch.FloatTensor of shape (C x H x W) and
                                                  # normalizes it to the range [0.0, 1.0]
    download=DOWNLOAD_DATA,                       # download it if you don't have it
)
# dataset = MNIST('../data/mnist/', transform=img_transform)
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=8)

print("It is DAE")
model = StackedAutoEncoder().cuda()

for epoch in range(num_epochs):
    if epoch % 10 == 0:
        # Test the quality of our features with a randomly initialized linear classifier.
        classifier = nn.Linear(10, 10).cuda()
        criterion = nn.CrossEntropyLoss()
        optimizer = torch.optim.Adam(classifier.parameters(), lr=0.001)
    model.train()
    total_time = time.time()
    correct = 0
    for i, data in enumerate(dataloader):
        img, target = data
        target = Variable(target).cuda()
        img = Variable(img).cuda()
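# NOTE: the inner loop above is truncated after moving the batch to the GPU.
# Below is a minimal, self-contained sketch of the linear-probe step it sets up
# (one update of a fresh nn.Linear on frozen encoder features, plus a count of
# correct predictions). `encode_fn` and the 10-dimensional feature size are
# assumptions for illustration, not the repository's actual API.
import torch
import torch.nn as nn

def probe_step(encode_fn, classifier, criterion, optimizer, img, target):
    """One linear-probe update on detached (frozen) encoder features."""
    with torch.no_grad():
        features = encode_fn(img)        # assumed to return an (N, 10) feature tensor
    logits = classifier(features)
    loss = criterion(logits, target)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    # number of correctly classified samples in this batch
    correct = (logits.argmax(dim=1) == target).sum().item()
    return loss.item(), correct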