Example #1
  def testMaskingSingleInput(self):

    class MaskedLayer(base_layers.Layer):

      def call(self, inputs, mask=None):
        if mask is not None:
          return inputs * mask
        return inputs

      def compute_mask(self, inputs, mask=None):
        return array_ops.ones_like(inputs)

    if context.in_graph_mode():
      x = base_layers.Input(shape=(32,))
      y = MaskedLayer()(x)  # pylint: disable=not-callable
      network = base_layers.Network(x, y)

      # test callability on Input
      x_2 = base_layers.Input(shape=(32,))
      y_2 = network(x_2)
      self.assertEqual(y_2.get_shape().as_list(), [None, 32])

      # test callability on regular tensor
      x_2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
      y_2 = network(x_2)
      self.assertEqual(y_2.get_shape().as_list(), [None, 32])
    else:
      a = constant_op.constant([2] * 32)
      mask = constant_op.constant([0, 1] * 16)
      a._keras_mask = mask
      b = MaskedLayer().apply(a)
      self.assertTrue(hasattr(b, '_keras_mask'))
      self.assertAllEqual(self.evaluate(array_ops.ones_like(mask)),
                          self.evaluate(getattr(b, '_keras_mask')))
      self.assertAllEqual(self.evaluate(a * mask), self.evaluate(b))
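The snippets in this collection reference base_layers, core_layers, array_ops, constant_op, context, sparse_ops, and np without showing any setup, and each one is written as a method of a TensorFlow test case. A minimal setup sketch, assuming the TensorFlow 1.x internal module layout these tests appear to use (the exact import paths are an assumption, not part of the examples themselves):

import numpy as np

from tensorflow.python.eager import context                # in_graph_mode / in_eager_mode
from tensorflow.python.framework import constant_op
from tensorflow.python.layers import base as base_layers   # Layer, Input, Network
from tensorflow.python.layers import core as core_layers   # Dense
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.platform import test                 # provides test.TestCase


class NetworkTest(test.TestCase):
  """Assumed container class; the example methods shown here would be defined inside it."""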
Example #2
  def testBasicNetwork(self):
    # minimum viable network
    x = base_layers.Input(shape=(32,))
    dense = core_layers.Dense(2)
    y = dense(x)
    network = base_layers.Network(x, y, name='dense_network')

    # test basic attributes
    self.assertEqual(network.name, 'dense_network')
    self.assertEqual(len(network.layers), 2)  # InputLayer + Dense
    self.assertEqual(network.layers[1], dense)
    self.assertEqual(network.weights, dense.weights)
    self.assertEqual(network.trainable_weights, dense.trainable_weights)
    self.assertEqual(network.non_trainable_weights,
                     dense.non_trainable_weights)

    # test callability on Input
    x_2 = base_layers.Input(shape=(32,))
    y_2 = network(x_2)
    self.assertEqual(y_2.get_shape().as_list(), [None, 2])

    # test callability on regular tensor
    x_2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
    y_2 = network(x_2)
    self.assertEqual(y_2.get_shape().as_list(), [None, 2])

    # test network `trainable` attribute
    network.trainable = False
    self.assertEqual(network.weights, dense.weights)
    self.assertEqual(network.trainable_weights, [])
    self.assertEqual(network.non_trainable_weights,
                     dense.trainable_weights + dense.non_trainable_weights)
Example #3
  def testCrossDataFlows(self):
    # Test the ability to have multi-output layers whose outputs get routed
    # to separate downstream layers.

    class PowersLayer(base_layers.Layer):

      def call(self, inputs):
        return [inputs**2, inputs**3]

    x = base_layers.Input(shape=(32,))
    p1, p2 = PowersLayer()(x)  # pylint: disable=not-callable
    y1 = core_layers.Dense(2)(p1)
    y2 = core_layers.Dense(3)(p2)
    network = base_layers.Network(x, [y1, y2])

    self.assertEqual(len(network.layers), 4)  # InputLayer + 2 * Dense + PowersLayer

    # Test callability.
    x2 = base_layers.Input(shape=(32,))
    outputs = network(x2)

    self.assertEqual(type(outputs), list)
    self.assertEqual(len(outputs), 2)
    self.assertEqual(outputs[0].get_shape().as_list(), [None, 2])
    self.assertEqual(outputs[1].get_shape().as_list(), [None, 3])
Example #4
    def testMultiIONetworkbuilding(self):
        input_a = base_layers.Input(shape=(32, ))
        input_b = base_layers.Input(shape=(16, ))
        a = core_layers.Dense(16)(input_a)

        class AddLayer(base_layers.Layer):
            def call(self, inputs):
                return inputs[0] + inputs[1]

            def _compute_output_shape(self, input_shape):
                return input_shape[0]

        c = AddLayer()([a, input_b])  # pylint: disable=not-callable
        c = core_layers.Dense(2)(c)

        network = base_layers.Network([input_a, input_b], [a, c])
        if context.in_eager_mode():
            a_val = constant_op.constant(
                np.random.random((10, 32)).astype('float32'))
            b_val = constant_op.constant(
                np.random.random((10, 16)).astype('float32'))
            outputs = network([a_val, b_val])
            self.assertEqual(len(outputs), 2)
            self.assertEqual(outputs[0].shape.as_list(), [10, 16])
            self.assertEqual(outputs[1].shape.as_list(), [10, 2])
Example #5
  def testNetworkRecursion(self):
    # test the ability of networks to be used as layers inside networks.
    a = base_layers.Input(shape=(32,))
    b = core_layers.Dense(2)(a)
    net = base_layers.Network(a, b)

    c = base_layers.Input(shape=(32,))
    d = net(c)

    recursive_net = base_layers.Network(c, d)
    self.assertEqual(len(recursive_net.layers), 2)
    self.assertEqual(recursive_net.layers[1], net)
    self.assertEqual(len(recursive_net.weights), 2)

    # test callability
    x = array_ops.placeholder(dtype='float32', shape=(None, 32))
    y = recursive_net(x)
    self.assertEqual(y.get_shape().as_list(), [None, 2])
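The two weights counted above belong to the single Dense layer inside the inner network. A short follow-up check, resting on the assumption (not asserted in the original test) that the outer network reuses the inner network's variables rather than copying them:

    # Assumption, not part of the original test: nesting shares variables,
    # so the outer network exposes the same Variable objects as the inner one.
    self.assertEqual(recursive_net.weights, net.weights)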
Example #6
    def testSparseInput(self):
        class SparseSoftmax(base_layers.Layer):
            def call(self, inputs):
                return sparse_ops.sparse_softmax(inputs)

        x = base_layers.Input(shape=(32, ), sparse=True)
        y = SparseSoftmax()(x)  # pylint: disable=not-callable
        network = base_layers.Network(x, y)

        self.assertEqual(len(network.layers), 2)
        self.assertEqual(network.layers[0].sparse, True)
Example #7
  def testMultiOutputNetwork(self):
    x = base_layers.Input(shape=(32,))
    y1 = core_layers.Dense(2)(x)
    y2 = core_layers.Dense(3)(x)
    network = base_layers.Network(x, [y1, y2])

    self.assertEqual(len(network.layers), 3)  # InputLayer + 2 * Dense

    # Test callability.
    x2 = base_layers.Input(shape=(32,))
    outputs = network(x2)

    self.assertEqual(type(outputs), list)
    self.assertEqual(len(outputs), 2)
    self.assertEqual(outputs[0].get_shape().as_list(), [None, 2])
    self.assertEqual(outputs[1].get_shape().as_list(), [None, 3])
Example #8
    def testMultiInputNetwork(self):
        a = base_layers.Input(shape=(32, ), name='input_a')
        b = base_layers.Input(shape=(32, ), name='input_b')

        class AddLayer(base_layers.Layer):
            def call(self, inputs):
                assert len(inputs) == 2
                return inputs[0] + inputs[1]

        c = AddLayer()([a, b])  # pylint: disable=not-callable
        network = base_layers.Network([a, b], c)
        self.assertEqual(len(network.layers), 3)  # 2 * InputLayer + AddLayer

        # Test callability.
        a2 = base_layers.Input(shape=(32, ))
        b2 = base_layers.Input(shape=(32, ))
        c2 = network([a2, b2])
        self.assertEqual(c2.get_shape().as_list(), [None, 32])
Example #9
  def testMultiInputMultiOutputNetworkSharedLayer(self):
    a = base_layers.Input(shape=(32,), name='input_a')
    b = base_layers.Input(shape=(32,), name='input_b')

    dense = core_layers.Dense(2)

    y1 = dense(a)
    y2 = dense(b)
    network = base_layers.Network([a, b], [y1, y2])
    self.assertEqual(len(network.layers), 3)  # 2 * InputLayer + Dense

    # Test callability.
    a2 = base_layers.Input(shape=(32,))
    b2 = base_layers.Input(shape=(32,))
    outputs = network([a2, b2])

    self.assertEqual(type(outputs), list)
    self.assertEqual(len(outputs), 2)
    self.assertEqual(outputs[0].get_shape().as_list(), [None, 2])
    self.assertEqual(outputs[1].get_shape().as_list(), [None, 2])
Example #10
  def testNetworkAttributes(self):
    x = base_layers.Input(shape=(32,))
    z = core_layers.Dense(2, kernel_regularizer=lambda x: 0.01 * (x**2))(x)
    dense = core_layers.Dense(2, name='dense')
    dense.add_update(1)
    y = dense(z)
    net = base_layers.Network(x, y)

    # losses
    self.assertEqual(len(net.losses), 1)

    # updates
    self.assertEqual(len(net.updates), 1)

    # get_layer
    self.assertEqual(net.get_layer('dense'), dense)
    self.assertEqual(net.get_layer(index=2), dense)
    with self.assertRaises(ValueError):
      net.get_layer('dense_unknown')
    with self.assertRaises(ValueError):
      net.get_layer()
    with self.assertRaises(ValueError):
      net.get_layer(index=4)

    # input, output
    self.assertEqual(net.input, x)
    self.assertEqual(net.output, y)

    # input_shape, output_shape
    self.assertEqual(net.input_shape, (None, 32))
    self.assertEqual(net.output_shape, (None, 2))

    # get_*_at
    self.assertEqual(net.get_input_at(0), x)
    self.assertEqual(net.get_output_at(0), y)

    # _compute_output_shape
    self.assertEqual(net._compute_output_shape((3, 32)).as_list(), [3, 2])
Example #11
    def testMaskingSingleInput(self):
        class MaskedLayer(base_layers.Layer):
            def call(self, inputs, mask=None):
                if mask is not None:
                    return inputs * mask
                return inputs

            def compute_mask(self, inputs, mask=None):
                return array_ops.ones_like(inputs)

        x = base_layers.Input(shape=(32, ))
        y = MaskedLayer()(x)  # pylint: disable=not-callable
        network = base_layers.Network(x, y)

        # test callability on Input
        x_2 = base_layers.Input(shape=(32, ))
        y_2 = network(x_2)
        self.assertEqual(y_2.get_shape().as_list(), [None, 32])

        # test callability on regular tensor
        x_2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
        y_2 = network(x_2)
        self.assertEqual(y_2.get_shape().as_list(), [None, 32])
Example #12
    def testSimpleNetworkBuilding(self):
        inputs = base_layers.Input(shape=(32, ))
        if context.in_eager_mode():
            self.assertIsInstance(inputs, base_layers._DeferredTensor)
            self.assertEqual(inputs.dtype.name, 'float32')
            self.assertEqual(inputs.shape.as_list(), [None, 32])

        x = core_layers.Dense(2)(inputs)
        if context.in_eager_mode():
            self.assertIsInstance(x, base_layers._DeferredTensor)
            self.assertEqual(x.dtype.name, 'float32')
            self.assertEqual(x.shape.as_list(), [None, 2])

        outputs = core_layers.Dense(4)(x)
        network = base_layers.Network(inputs, outputs)
        self.assertIsInstance(network, base_layers.Network)

        if context.in_eager_mode():
            # It should be possible to call such a network on EagerTensors.
            inputs = constant_op.constant(
                np.random.random((10, 32)).astype('float32'))
            outputs = network(inputs)
            self.assertEqual(outputs.shape.as_list(), [10, 4])
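The assertions in this example only run in eager mode. A graph-mode counterpart, sketched here by analogy with the other examples in this collection (not part of the original test):

        if context.in_graph_mode():
            # Calling the network on a placeholder yields a symbolic tensor
            # whose static shape reflects the final Dense(4) layer.
            x = array_ops.placeholder(dtype='float32', shape=(None, 32))
            y = network(x)
            self.assertEqual(y.get_shape().as_list(), [None, 4])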
Example #13
  def testInvalidNetworks(self):
    # redundant inputs
    x = base_layers.Input(shape=(32,))
    y = core_layers.Dense(2)(x)
    with self.assertRaises(ValueError):
      base_layers.Network([x, x], y)

    # inputs that don't come from Input
    x = array_ops.placeholder(dtype='float32', shape=(None, 32))
    y = core_layers.Dense(2)(x)
    with self.assertRaises(ValueError):
      base_layers.Network(x, y)

    # inputs that don't come from Input but have a layer history
    x = base_layers.Input(shape=(32,))
    x = core_layers.Dense(32)(x)
    y = core_layers.Dense(2)(x)
    with self.assertRaises(ValueError):
      base_layers.Network(x, y)

    # outputs that don't come from layers
    x = base_layers.Input(shape=(32,))
    y = core_layers.Dense(2)(x)
    y = 2 * y
    with self.assertRaises(ValueError):
      base_layers.Network(x, y)

    # disconnected graphs
    x1 = base_layers.Input(shape=(32,))
    x2 = base_layers.Input(shape=(32,))
    y = core_layers.Dense(2)(x1)
    with self.assertRaises(ValueError):
      base_layers.Network(x2, y)

    # redundant layer names
    x = base_layers.Input(shape=(32,))
    z = core_layers.Dense(2, name='dense')(x)
    y = core_layers.Dense(2, name='dense')(z)
    with self.assertRaises(ValueError):
      base_layers.Network(x, y)
Example #14
  def testInputTensorWrapping(self):
    x = array_ops.placeholder(dtype='float32', shape=(None, 32))
    x = base_layers.Input(tensor=x)
    y = core_layers.Dense(2)(x)
    base_layers.Network(x, y)
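A short follow-up sketch (an assumption, not verified in the original snippet): a network built on a wrapped external tensor should remain callable on fresh tensors, mirroring the other examples in this collection.

    # Assumption, not part of the original snippet.
    network = base_layers.Network(x, y)
    x2 = array_ops.placeholder(dtype='float32', shape=(None, 32))
    y2 = network(x2)
    self.assertEqual(y2.get_shape().as_list(), [None, 2])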