Ejemplo n.º 1
0
 def test_pass_through(self, width, height, channels, sizes):
     """Forward pass through SPP preserves spatial dims and scales channels.

     Builds a symbolic input of shape (width, height, channels), applies
     nn_blocks.SPP, and checks the static output shape: the batch dim stays
     unknown (None), spatial dims are unchanged, and the channel count is
     channels * (len(sizes) + 1) — one pooled copy per pool size concatenated
     with the original input.
     """
     x = tf.keras.Input(shape=(width, height, channels))
     test_layer = nn_blocks.SPP(sizes=sizes)
     outx = test_layer(x)
     self.assertAllEqual(outx.shape.as_list(),
                         [None, width, height, channels * (len(sizes) + 1)])
Ejemplo n.º 2
0
    # NOTE(review): method name has a typo ("though" -> "through"); kept as-is
    # because renaming would change the test id seen by the test runner.
    def test_gradient_pass_though(self, width, height, channels, sizes):
        """Gradients flow through SPP: no trainable variable gets a None grad.

        Runs one forward/backward pass with an MSE loss against a random
        target of the SPP output shape, asserts every gradient is non-None,
        then applies them with SGD to confirm the update step also works.
        """
        loss = tf.keras.losses.MeanSquaredError()
        optimizer = tf.keras.optimizers.SGD()
        test_layer = nn_blocks.SPP(sizes=sizes)

        init = tf.random_normal_initializer()
        x = tf.Variable(initial_value=init(shape=(1, width, height, channels),
                                           dtype=tf.float32))
        # Target matches the SPP output: channel count multiplies by
        # len(sizes) + 1 (original input plus one pooled copy per size).
        y = tf.Variable(initial_value=init(shape=(1, width, height,
                                                  channels * (len(sizes) + 1)),
                                           dtype=tf.float32))

        with tf.GradientTape() as tape:
            x_hat = test_layer(x)
            # Keras losses are called as loss(y_true, y_pred). MSE is
            # symmetric, so the original (x_hat, y) order produced the same
            # value, but this is the documented argument order.
            grad_loss = loss(y, x_hat)
        grad = tape.gradient(grad_loss, test_layer.trainable_variables)

        # Assert before applying: apply_gradients itself raises on a None
        # gradient, which would mask the specific failure this test checks.
        self.assertNotIn(None, grad)
        optimizer.apply_gradients(zip(grad, test_layer.trainable_variables))