Example #1
# Assumed imports for this snippet; tensor2tensor provides common_layers.
import tensorflow as tf
from tensor2tensor.layers import common_layers


def composite_layer(inputs, mask, hparams, for_output=False):
    """Composite layer: stacks sub-layers chosen by hparams.composite_layer_type."""
    x = inputs

    # Stacks Ravanbakhsh set layers on top of each other.
    if hparams.composite_layer_type == "ravanbakhsh":
        for layer in range(hparams.layers_per_layer):
            with tf.variable_scope(".%d" % layer):
                x = common_layers.ravanbakhsh_set_layer(
                    hparams.hidden_size,
                    x,
                    mask=mask,
                    sequential=for_output,
                    dropout=hparams.relu_dropout)

    # Transforms elements to get a context, then uses that context in a final layer.
    elif hparams.composite_layer_type == "reembedding":
        # Transform elements n times, pooling a fresh context after each pass.
        for layer in range(hparams.layers_per_layer):
            with tf.variable_scope("sub_layer_%d" % layer):
                x = common_layers.linear_set_layer(
                    hparams.hidden_size, x, dropout=hparams.relu_dropout)
                if for_output:
                    # Causal context: position t pools only over elements <= t.
                    context = common_layers.running_global_pool_1d(x)
                else:
                    context = common_layers.global_pool_1d(x, mask=mask)
        # Final layer.
        x = common_layers.linear_set_layer(hparams.hidden_size,
                                           x,
                                           context=context,
                                           dropout=hparams.relu_dropout)
    return x
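
For orientation, here is a minimal usage sketch for composite_layer. It assumes TF 1.x graph mode with tensor2tensor installed; the hparams values, shapes, and names below are illustrative assumptions, not taken from the example above.

# Hypothetical usage sketch (TF 1.x; values and shapes are assumptions).
import tensorflow as tf

hparams = tf.contrib.training.HParams(
    composite_layer_type="reembedding",  # or "ravanbakhsh"
    layers_per_layer=2,
    hidden_size=64,
    relu_dropout=0.1)

inputs = tf.zeros([8, 10, hparams.hidden_size])   # [batch, length, hidden]
mask = tf.ones([8, 10])                           # 1.0 = real element, 0.0 = padding
outputs = composite_layer(inputs, mask, hparams)  # [batch, length, hidden]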
Example #2

  # Method of a tf.test.TestCase subclass; assumes numpy as np, tensorflow as
  # tf, and tensor2tensor's common_layers are imported at module level.
  def testGlobalPool1d(self):
    x1 = np.random.rand(5, 4, 11)
    no_mask = np.ones((5, 4))
    full_mask = np.zeros((5, 4))

    x1_ = tf.Variable(x1, dtype=tf.float32)
    no_mask_ = tf.Variable(no_mask, dtype=tf.float32)
    full_mask_ = tf.Variable(full_mask, dtype=tf.float32)

    none_mask_max = common_layers.global_pool_1d(x1_)
    no_mask_max = common_layers.global_pool_1d(x1_, mask=no_mask_)
    result1 = tf.reduce_sum(none_mask_max - no_mask_max)

    full_mask_max = common_layers.global_pool_1d(x1_, mask=full_mask_)
    result2 = tf.reduce_sum(full_mask_max)

    none_mask_avr = common_layers.global_pool_1d(x1_, "AVR")
    no_mask_avr = common_layers.global_pool_1d(x1_, "AVR", no_mask_)
    result3 = tf.reduce_sum(none_mask_avr - no_mask_avr)

    full_mask_avr = common_layers.global_pool_1d(x1_, "AVR", full_mask_)
    result4 = tf.reduce_sum(full_mask_avr)

    self.evaluate(tf.global_variables_initializer())
    actual = self.evaluate([result1, result2, result3, result4])
    self.assertAllEqual(actual[:3], [0.0, 0.0, 0.0])
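
To make the assertions concrete, here is a minimal sketch of the masked pooling behavior this test exercises. It is a hypothetical stand-in for common_layers.global_pool_1d, not the actual tensor2tensor implementation; the positional argument order (inputs, pooling type, mask) mirrors how the test calls it.

import tensorflow as tf

def masked_pool_1d(inputs, pooling="MAX", mask=None):
    # Hypothetical sketch: pools over the length axis of [batch, length, depth].
    # mask is [batch, length] with 1.0 for real positions and 0.0 for padding.
    if mask is not None:
        mask = tf.expand_dims(mask, axis=2)  # [batch, length, 1]
        inputs = inputs * mask               # zero out padded positions
    if pooling == "MAX":
        return tf.reduce_max(inputs, axis=1)
    # "AVR": average over unmasked positions; the small epsilon keeps the
    # all-zero-mask case (result4 above) finite instead of 0/0.
    if mask is not None:
        return tf.reduce_sum(inputs, axis=1) / (tf.reduce_sum(mask, axis=1) + 1e-9)
    return tf.reduce_mean(inputs, axis=1)

Under a sketch like this the first three results are zero, matching the assertion: an all-ones mask leaves both MAX and AVR pooling unchanged, and an all-zeros mask zeroes the MAX result.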
Example #3
    # Same test in TF1 session style; method of a tf.test.TestCase subclass,
    # with the same assumed imports as Example #2.
    def testGlobalPool1d(self):
        x1 = np.random.rand(5, 4, 11)
        no_mask = np.ones((5, 4))
        full_mask = np.zeros((5, 4))

        with self.test_session() as session:
            x1_ = tf.Variable(x1, dtype=tf.float32)
            no_mask_ = tf.Variable(no_mask, dtype=tf.float32)
            full_mask_ = tf.Variable(full_mask, dtype=tf.float32)

            none_mask_max = common_layers.global_pool_1d(x1_)
            no_mask_max = common_layers.global_pool_1d(x1_, mask=no_mask_)
            result1 = tf.reduce_sum(none_mask_max - no_mask_max)

            full_mask_max = common_layers.global_pool_1d(x1_, mask=full_mask_)
            result2 = tf.reduce_sum(full_mask_max)

            none_mask_avr = common_layers.global_pool_1d(x1_, "AVR")
            no_mask_avr = common_layers.global_pool_1d(x1_, "AVR", no_mask_)
            result3 = tf.reduce_sum(none_mask_avr - no_mask_avr)

            full_mask_avr = common_layers.global_pool_1d(
                x1_, "AVR", full_mask_)
            result4 = tf.reduce_sum(full_mask_avr)

            session.run(tf.global_variables_initializer())
            actual = session.run([result1, result2, result3, result4])
        self.assertAllEqual(actual[:3], [0.0, 0.0, 0.0])
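
Example #1 also calls common_layers.running_global_pool_1d on the for_output path. The following is a hypothetical sketch of what a running (causal) max pool computes, not the tensor2tensor implementation:

import tensorflow as tf

def running_max_pool_1d(inputs):
    # Hypothetical sketch: output position t is the elementwise max over
    # inputs[:, :t + 1, :], so step t never sees future elements.
    xt = tf.transpose(inputs, [1, 0, 2])  # [length, batch, depth]
    yt = tf.scan(tf.maximum, xt)          # running elementwise maximum
    return tf.transpose(yt, [1, 0, 2])    # back to [batch, length, depth]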