Example #1
import tensorflow as tf
import tensorflow_hub as hub

# `NetVLAD` and `autopool` are project-local modules defined elsewhere in
# this repository.
def get_keras_model(num_classes,
                    input_length,
                    use_batchnorm=True,
                    l2=1e-5,
                    num_clusters=None,
                    alpha_init=None):
    """Make a model."""
    assert not num_clusters or not alpha_init
    model = tf.keras.models.Sequential()
    model.add(tf.keras.Input((input_length, )))  # Input is [bs, input_length]
    trill_layer = hub.KerasLayer(
        handle='https://tfhub.dev/google/nonsemantic-speech-benchmark/trill-distilled/3',
        trainable=True,
        arguments={'sample_rate': tf.constant(16000, tf.int32)},
        output_key='embedding',
        output_shape=[None, 2048])
    assert trill_layer.trainable_variables
    model.add(trill_layer)
    if num_clusters and num_clusters > 0:
        model.add(NetVLAD(num_clusters=num_clusters))
        if use_batchnorm:
            model.add(tf.keras.layers.BatchNormalization())
    elif alpha_init is not None:
        model.add(
            autopool.AutoPool(axis=1, alpha_init=alpha_init, trainable=False))
    else:
        model.add(tf.keras.layers.Lambda(lambda x: tf.reduce_mean(x, axis=1)))
    model.add(
        tf.keras.layers.Dense(
            num_classes, kernel_regularizer=tf.keras.regularizers.l2(l=l2)))

    return model
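
A minimal usage sketch (hypothetical values; the TRILL layer above is configured for 16 kHz audio, so `input_length` is a sample count at 16 kHz):

# Hypothetical usage: one second of 16 kHz audio, five target classes.
model = get_keras_model(num_classes=5, input_length=16000, num_clusters=8)
model.compile(
    optimizer=tf.keras.optimizers.Adam(),
    loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True))
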
Example #2
def test_keras_model_has_no_trainable_vars(self):
  """Test that the pooling variable is not trainable when `trainable=False`."""
  m = tf.keras.models.Sequential(
      [tf.keras.Input(shape=(4, 5)),
       autopool.AutoPool(axis=1, trainable=False)])
  m.build()
  self.assertEmpty(m.trainable_variables)
Example #3
def test_limiting_behavior(self, alpha_init, agg_func):
  """Test semantics of alpha.

  When `alpha` = 0, pooling reduces to an unweighted mean; when `alpha` = 1,
  it simplifies to soft-max pooling; when `alpha` -> inf, it approaches the
  max operator; and when `alpha` -> -inf, it approaches the min operator.

  Args:
    alpha_init: The initial value of the alpha parameter.
    agg_func: The expected aggregation function.
  """
  a = tf.random.uniform(shape=(2, 3, 1), seed=12)
  actual = autopool.AutoPool(axis=1, alpha_init=alpha_init)(a, keepdims=True)
  expected = agg_func(a, axis=1, keepdims=True)
  self.assertAllClose(actual, expected)
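
The test above is parameterized over (`alpha_init`, `agg_func`) pairs. A plausible parameterization is sketched below; the decorator and exact values are assumptions rather than taken from the source, and a large finite alpha stands in for the infinite limits:

from absl.testing import parameterized

# Hypothetical parameter tuples: each supplies (alpha_init, agg_func).
@parameterized.parameters(
    (0.0, tf.reduce_mean),    # alpha = 0: unweighted mean.
    (100.0, tf.reduce_max),   # alpha -> inf: max pooling.
    (-100.0, tf.reduce_min),  # alpha -> -inf: min pooling.
)
def test_limiting_behavior(self, alpha_init, agg_func):
  ...
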
Example #4
def get_keras_model(num_classes, use_batchnorm=True, l2=1e-5,
                    num_clusters=None, alpha_init=None):
  """Make a model."""
  model = tf.keras.models.Sequential()
  if num_clusters and num_clusters > 0:
    model.add(NetVLAD(num_clusters=num_clusters))
    if use_batchnorm:
      model.add(tf.keras.layers.BatchNormalization())
  elif alpha_init is not None:
    model.add(autopool.AutoPool(axis=1, alpha_init=alpha_init, trainable=False))
  else:
    model.add(tf.keras.layers.Lambda(lambda x: tf.reduce_mean(x, axis=1)))
  model.add(tf.keras.layers.Dense(
      num_classes, kernel_regularizer=tf.keras.regularizers.l2(l=l2)))

  return model
Example #5
def test_keras_model_has_trainable_vars(self):
    """Test that pooling variable is properly trainable."""
    m = tf.keras.models.Sequential(
        [tf.keras.Input(shape=(4, 5)),
         autopool.AutoPool(axis=1)])
    m.build()
    self.assertLen(m.trainable_variables, 1)

    # Run a fake training step and check that values after the step are
    # different. Access through `average_alpha` to check that it works.
    original_var_val = m.get_layer(index=0).average_alpha.numpy()
    with tf.GradientTape() as tape:
        o = m(tf.random.uniform(shape=(3, 4, 5)), training=True)
        o.shape.assert_is_compatible_with((3, 5))
        loss_value = tf.keras.losses.CategoricalCrossentropy(
            from_logits=True)(y_true=tf.ones_like(o), y_pred=o)
    grads = tape.gradient(loss_value, m.trainable_variables)
    tf.keras.optimizers.Adam(learning_rate=1.0).apply_gradients(
        zip(grads, m.trainable_variables))
    post_training_var_val = m.get_layer(index=0).average_alpha.numpy()

    self.assertNotAllClose(original_var_val, post_training_var_val)
Example #6
def test_average_alpha(self):
  """Test that `average_alpha` exists and is a scalar after build."""
  a = tf.random.uniform(shape=(3, 4, 5), seed=123)
  layer = autopool.AutoPool(axis=1)
  layer(a)  # Triggers layer build.
  self.assertIsNotNone(layer.average_alpha)
  self.assertEqual(layer.average_alpha.shape, [])
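
For reference, the operation these tests exercise can be written in a few lines. Below is a minimal sketch of auto-pooling (an alpha-scaled, softmax-weighted mean), not the library's actual implementation; in the real layer, alpha is a trainable variable and `average_alpha` presumably reports its mean as a scalar:

import tensorflow as tf

def autopool_reference(x, alpha, axis=1):
  """Softmax-weighted mean over `axis`, controlled by `alpha`.

  alpha = 0 yields the unweighted mean, alpha = 1 soft-max pooling,
  and alpha -> +/-inf approaches max/min pooling.
  """
  weights = tf.nn.softmax(alpha * x, axis=axis)
  return tf.reduce_sum(weights * x, axis=axis)
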