  def __init__(self,
               depth,
               bias=LSTMBiasInit,
               initializer=block_util.RsqrtInitializer(),
               name=None):
    super(LSTM, self).__init__([depth], name)

    with self._BlockScope():
      self._depth = depth
      # The input transform (NN) and the recurrent transform (Linear)
      # each emit 4 * depth units: one block of `depth` per LSTM gate.
      self._nn = blocks_std.NN(
          4 * depth, bias=bias, act=None, initializer=initializer)
      self._hidden_linear = blocks_std.Linear(
          4 * depth, initializer=initializer)
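
The 4 * depth width is the usual convention of computing all four LSTM gate pre-activations in a single matrix multiply. A minimal sketch of how such a pre-activation is typically split into gates (the function name and gate ordering are illustrative assumptions, not taken from this library):

import tensorflow as tf

def lstm_gates(pre_activation, c_prev):
  # Split the [batch, 4 * depth] pre-activation into the four gates:
  # input (i), candidate (j), forget (f), and output (o). The ordering
  # is an assumption; the library may use a different one.
  i, j, f, o = tf.split(pre_activation, num_or_size_splits=4, axis=1)
  # Standard LSTM cell update.
  c = tf.sigmoid(f) * c_prev + tf.sigmoid(i) * tf.tanh(j)
  h = tf.sigmoid(o) * tf.tanh(c)
  return h, c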
Example #2
  def testLinearShared(self):
    # Create a linear map which is applied twice on different inputs
    # (i.e. the weights of the map are shared).
    linear_map = blocks_std.Linear(6)
    x1 = tf.random_normal(shape=[1, 5])
    x2 = tf.random_normal(shape=[1, 5])
    xs = x1 + x2

    # Apply the transform with the same weights.
    y1 = linear_map(x1)
    y2 = linear_map(x2)
    ys = linear_map(xs)

    with self.test_session() as sess:
      # Initialize all the variables of the graph.
      tf.global_variables_initializer().run()

      y1_res, y2_res, ys_res = sess.run([y1, y2, ys])
      self.assertAllClose(y1_res + y2_res, ys_res)
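
The assertion holds because the shared map is linear: assuming blocks_std.Linear is a bias-free matrix multiplication (which the assertion requires), f(x1) + f(x2) equals f(x1 + x2). The same identity in plain NumPy (names are illustrative):

import numpy as np

rng = np.random.default_rng(0)
w = rng.normal(size=(5, 6))   # one weight matrix, shared across calls
x1 = rng.normal(size=(1, 5))
x2 = rng.normal(size=(1, 5))

# Matrix multiplication distributes over addition, which is exactly
# what the test asserts.
np.testing.assert_allclose(x1 @ w + x2 @ w, (x1 + x2) @ w)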
Example #3
  def testLinear(self):
    m = blocks_std.Linear(10)
    x = tf.placeholder(dtype=tf.float32, shape=[8, 9])
    y = m(x)
    self.assertEqual(m._matrix.get_shape(), [9, 10])
    self.assertIs(x, self.CheckLinear(y, m))
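
The shape assertion is plain matrix-multiplication shape inference: an [8, 9] input against a [9, 10] weight matrix yields an [8, 10] output. A standalone TF1-style sketch of the same shapes (variable names are illustrative, not the library's):

import tensorflow as tf

x = tf.placeholder(dtype=tf.float32, shape=[8, 9])
w = tf.get_variable('w', shape=[9, 10], dtype=tf.float32)
y = tf.matmul(x, w)  # [8, 9] x [9, 10] -> [8, 10]
assert y.get_shape().as_list() == [8, 10]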