Example #1
  def _LSTMLayer(self, prev_layer, direction, dim, summarize, depth, name):
    """Adds an LSTM layer with the given pre-parsed attributes.

    Always maps 4-D to 4-D regardless of summarize.

    Args:
      prev_layer: Input tensor.
      direction:  'forward', 'backward', or 'bidirectional'.
      dim:        'x' or 'y', dimension to consider as time.
      summarize:  True if we are to return only the last timestep.
      depth:      Output depth.
      name:       Some string naming the op.

    Returns:
      Output tensor.
    """
    # If the target dimension is y, we need to transpose.
    if dim == 'x':
      lengths = self.GetLengths(2, 1)
      inputs = prev_layer
    else:
      lengths = self.GetLengths(1, 1)
      inputs = tf.transpose(prev_layer, [0, 2, 1, 3], name=name + '_ytrans_in')
    input_batch = shapes.tensor_dim(inputs, 0)
    num_slices = shapes.tensor_dim(inputs, 1)
    num_steps = shapes.tensor_dim(inputs, 2)
    input_depth = shapes.tensor_dim(inputs, 3)
    # Reshape away the other dimension.
    inputs = tf.reshape(
        inputs, [-1, num_steps, input_depth], name=name + '_reshape_in')
    # We need to replicate the lengths by the size of the other dimension, and
    # any changes that have been made to the batch dimension.
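    # For example (illustrative numbers): if lengths has shape [2] (one entry
    # per original batch item) and the reshape above folded 8 slices into the
    # batch, tile_factor is (2 * 8) / 2 = 8, yielding lengths of shape [16].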
    tile_factor = tf.to_float(input_batch *
                              num_slices) / tf.to_float(tf.shape(lengths)[0])
    lengths = tf.tile(lengths, [tf.cast(tile_factor, tf.int32)])
    lengths = tf.cast(lengths, tf.int64)
    outputs = nn_ops.rnn_helper(
        inputs,
        lengths,
        cell_type='lstm',
        num_nodes=depth,
        direction=direction,
        name=name,
        stddev=0.1)
    # Output depth is doubled if bi-directional.
    if direction == 'bidirectional':
      output_depth = depth * 2
    else:
      output_depth = depth
    # Restore the other dimension.
    if summarize:
      outputs = tf.slice(
          outputs, [0, num_steps - 1, 0], [-1, 1, -1], name=name + '_sum_slice')
      outputs = tf.reshape(
          outputs, [input_batch, num_slices, 1, output_depth],
          name=name + '_reshape_out')
    else:
      outputs = tf.reshape(
          outputs, [input_batch, num_slices, num_steps, output_depth],
          name=name + '_reshape_out')
    if dim == 'y':
      outputs = tf.transpose(outputs, [0, 2, 1, 3], name=name + '_ytrans_out')
    return outputs
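
The core move in this layer is folding the non-time spatial dimension into the batch dimension, so a 1-D LSTM can sweep either axis of a 4-D [batch, height, width, depth] tensor. Below is a minimal standalone sketch of that same fold/sweep/restore pattern. It is illustrative only: the original relies on project helpers (self.GetLengths, shapes.tensor_dim, nn_ops.rnn_helper) and TF 1.x APIs such as tf.to_float, none of which are reproduced here; the sketch substitutes tf.keras.layers.LSTM (TF 2.x) and assumes uniform sequence lengths, so it skips the lengths tiling entirely.

import tensorflow as tf

def lstm_over_x(images, depth, summarize=False):
  """Illustrative: runs an LSTM along the x (width) axis of a [b, h, w, d] tensor."""
  batch = tf.shape(images)[0]
  height = tf.shape(images)[1]
  width = tf.shape(images)[2]
  input_depth = images.shape[3]  # static channel depth, required by Keras
  # Fold height into the batch so every row becomes an independent sequence.
  rows = tf.reshape(images, [batch * height, width, input_depth])
  lstm = tf.keras.layers.LSTM(depth, return_sequences=not summarize)
  out = lstm(rows)
  if summarize:
    # Keep only the last timestep, but restore a singleton time axis so the
    # result is still 4-D, matching the original docstring's promise.
    return tf.reshape(out, [batch, height, 1, depth])
  # Restore the folded dimension.
  return tf.reshape(out, [batch, height, width, depth])

x = tf.random.normal([2, 8, 32, 16])
print(lstm_over_x(x, depth=24).shape)                   # (2, 8, 32, 24)
print(lstm_over_x(x, depth=24, summarize=True).shape)   # (2, 8, 1, 24)

Note how summarize collapses the time axis to a singleton rather than dropping it, which is what keeps the 4-D-to-4-D contract stated in the docstring.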