def testShape(self):
    """Checks that the network's update has the same shape as its input."""
    k_shape = [5, 5]
    # The network expects a 4-D input: spatial kernel dims + two extra dims.
    full_shape = k_shape + [2, 2]
    grads = tf.random_normal(full_shape)
    net = networks.KernelDeepLSTM(layers=(1, 1), kernel_shape=k_shape)
    initial_state = net.initial_state_for_inputs(grads)
    update, _ = net(grads, initial_state)
    self.assertEqual(update.get_shape().as_list(), full_shape)
def testTrainable(self):
    """Tests the network contains trainable variables."""
    k_shape = [5, 5]
    # The network expects a 4-D input: spatial kernel dims + two extra dims.
    grads = tf.random_normal(k_shape + [2, 2])
    net = networks.KernelDeepLSTM(layers=(1,), kernel_shape=k_shape)
    initial_state = net.initial_state_for_inputs(grads)
    net(grads, initial_state)
    # Expect weights and biases for two layers (4 variables in total).
    trainable = nn.get_variables_in_module(net)
    self.assertEqual(len(trainable), 4)
def testResults(self, initializer):
    """Tests zero updates when last layer is initialized to zero."""
    k_shape = [5, 5]
    # The network expects a 4-D input: spatial kernel dims + two extra dims.
    full_shape = k_shape + [2, 2]
    grads = tf.random_normal(full_shape)
    net = networks.KernelDeepLSTM(
        layers=(1, 1), kernel_shape=k_shape, initializer=initializer)
    initial_state = net.initial_state_for_inputs(grads)
    update, _ = net(grads, initial_state)
    with self.test_session() as sess:
        sess.run(tf.global_variables_initializer())
        result = sess.run(update)
        # A zero-initialized output layer must produce an all-zero update.
        self.assertAllEqual(result, np.zeros(full_shape))