def reconstruction_loss_for(self, output_nodes):
    """Train a denoising layer on MNIST and return its reconstruction cost.

    Builds a single back-activating layer with `output_nodes` hidden units on
    784-dim MNIST inputs, trains its unsupervised (denoising) objective for a
    few epochs, then evaluates the prediction-time reconstruction cost on the
    test images.
    """
    data = self.mnist_data
    layer = Layer(InputLayer(784), output_nodes,
                  session=self.session,
                  noise_std=1.0,
                  bactivate=True)

    train_cost = layer.unsupervised_cost_train()
    train_op = tf.train.AdamOptimizer(0.1).minimize(train_cost)
    self.session.run(tf.initialize_all_variables())

    # Keep training until several more full passes over the training set
    # have completed (labels are unused — this is purely unsupervised).
    last_epoch = data.train.epochs_completed + 3
    while data.train.epochs_completed <= last_epoch:
        batch_x, _ = data.train.next_batch(100)
        self.session.run(train_op,
                         feed_dict={layer.input_placeholder: batch_x})

    result = self.session.run(layer.unsupervised_cost_predict(),
                              feed_dict={layer.input_placeholder: data.test.images})
    print("denoising with %s hidden layer had cost %s" % (output_nodes, result))
    return result
def test_noise_reconstruction(self):
    """Smoke-test denoising on a tiny synthetic dataset.

    The data is ten points lying on a single line in a 10-dim input space,
    so a single hidden node should suffice to reconstruct it; trains the
    unsupervised objective and prints the resulting prediction cost.
    """
    INPUT_DIM = 10
    HIDDEN_NODES = 1
    layer = Layer(InputLayer(INPUT_DIM), HIDDEN_NODES,
                  session=self.session,
                  noise_std=1.0,
                  bactivate=True)

    # single cluster reconstruct: each point is one value repeated
    # across every input dimension.
    data = [[i * .1] * INPUT_DIM for i in range(10)]

    train_op = tf.train.AdamOptimizer(0.1).minimize(layer.unsupervised_cost_train())
    self.session.run(tf.initialize_all_variables())

    for _ in range(200):
        self.session.run(train_op,
                         feed_dict={layer.input_placeholder: data})

    result = self.session.run(layer.unsupervised_cost_predict(),
                              feed_dict={layer.input_placeholder: data})
    print("denoising with %s hidden layer had cost %s" % (HIDDEN_NODES, result))