Example #1
0
 def testConvGRU(self):
   """Checks conv_gru preserves the input shape for SAME and LEFT padding."""
   inputs = np.random.rand(5, 7, 3, 11)
   out_same = common_layers.conv_gru(
       tf.constant(inputs, dtype=tf.float32), (1, 3), 11)
   out_left = common_layers.conv_gru(
       tf.constant(inputs, dtype=tf.float32), (1, 3), 11, padding="LEFT")
   self.evaluate(tf.global_variables_initializer())
   # The gated convolution must not change the (batch, h, w, channels) shape.
   self.assertEqual(self.evaluate(out_same).shape, (5, 7, 3, 11))
   self.assertEqual(self.evaluate(out_left).shape, (5, 7, 3, 11))
Example #2
0
 def testConvGRU(self):
   """Checks that conv_gru output has the same shape as its input.

   Runs conv_gru with default (SAME) padding and with padding="LEFT" on a
   random (5, 7, 3, 11) tensor and asserts the output shape is unchanged.
   """
   x = np.random.rand(5, 7, 3, 11)
   y = common_layers.conv_gru(tf.constant(x, dtype=tf.float32), (1, 3), 11)
   z = common_layers.conv_gru(
       tf.constant(x, dtype=tf.float32), (1, 3), 11, padding="LEFT")
   # conv_gru creates variables; initialize them before evaluation.
   self.evaluate(tf.global_variables_initializer())
   res1 = self.evaluate(y)
   res2 = self.evaluate(z)
   # Both padding modes must preserve the (batch, h, w, channels) shape.
   self.assertEqual(res1.shape, (5, 7, 3, 11))
   self.assertEqual(res2.shape, (5, 7, 3, 11))
Example #3
0
 def step(state, inp):
     """Runs one recurrent step: stacked conv-GRU layers over the state.

     Where `inp` is all-zero (padding), the previous state is passed
     through unchanged.
     """
     hidden = tf.nn.dropout(state, 1.0 - hparams.dropout)
     for layer_idx in range(hparams.num_hidden_layers):
         hidden = common_layers.conv_gru(
             hidden,
             (hparams.kernel_height, hparams.kernel_width),
             hparams.hidden_size,
             name="cgru_%d" % layer_idx)
     # Padding input is zeroed-out in the modality, so a per-position
     # absolute sum near zero identifies padding positions.
     is_padding = tf.less(tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 0.00001)
     # No-op (keep old state) wherever inp is padding.
     return tf.where(is_padding, state, hidden)
Example #4
0
 def step(state, inp):
   """Runs one recurrent step of stacked conv-GRU layers.

   Args:
     state: current state tensor; returned unchanged at padding positions.
     inp: input tensor for this step; used only to detect padding.

   Returns:
     The updated state tensor, same shape as `state`.
   """
   # NOTE(review): 1.0 - dropout looks like the TF1 keep_prob argument of
   # tf.nn.dropout — confirm against the TF version this file targets.
   x = tf.nn.dropout(state, 1.0 - hparams.dropout)
   # Fixed: `xrange` is Python 2-only (NameError on Python 3); `range`
   # iterates identically on both.
   for layer in range(hparams.num_hidden_layers):
     x = common_layers.conv_gru(
         x, (hparams.kernel_height, hparams.kernel_width),
         hparams.hidden_size,
         name="cgru_%d" % layer)
   # Padding input is zeroed-out in the modality, we check this by summing.
   padding_inp = tf.less(tf.reduce_sum(tf.abs(inp), axis=[1, 2]), 0.00001)
   new_state = tf.where(padding_inp, state, x)  # No-op where inp is padding.
   return new_state