def __call__(self, input_layer, num_units, bias=True, name=PROVIDED, stddev=None, init=None, lengths=None):
    """Runs an unrolled GRU over a sequence of inputs.

    The starting state is pulled from the bookkeeper's recurrent state and,
    when the bookkeeper supports state saving, the final state is stored back.

    Args:
      input_layer: PrettyTensor (provided).
      num_units: Number of units in the hidden states.
      bias: Whether or not to use a bias.
      name: The name of this layer.
      stddev: Standard deviation for Gaussian initialization of parameters.
      init: A tf.*Initializer that is used to initialize the variables.
      lengths: An optional Tensor that encodes a length for each item in the
        minibatch. This is used to truncate computation.
    Returns:
      A sequence with the result at each timestep.
    Raises:
      ValueError: if head is not a sequence, the shape is not rank 2 or the
        number of nodes (second dim) is not known.
    """
    # Build the per-step GRU template lazily, on the first invocation only.
    if not self.template:
        base = prettytensor.template("input", input_layer.bookkeeper)
        cell = base.gru_cell(
            prettytensor.UnboundVariable("state"),
            num_units,
            bias=bias,
            stddev=stddev,
            init=init,
        )
        self.template = cell.as_fn("input", "state")

    # A GRU carries a single hidden state of shape (batch, num_units).
    state_shapes = [(input_layer.shape[0], num_units)]
    return unroll_state_saver(input_layer, name, state_shapes, self.template, lengths)
    def __call__(self,
                 input_layer,
                 num_units,
                 bias=True,
                 peephole=True,
                 name=PROVIDED,
                 stddev=None,
                 init=None,
                 lengths=None):
        """Runs an unrolled LSTM over a sequence of inputs.

    The starting state is pulled from the bookkeeper's recurrent state and,
    when the bookkeeper supports state saving, the final state is stored back.

    Args:
      input_layer: PrettyTensor (provided).
      num_units: Number of nodes in the hidden states and the output size.
      bias: Whether or not to use a bias.
      peephole: Whether to use peephole connections.
      name: The name of this layer.
      stddev: Standard deviation for Gaussian initialization of parameters.
      init: A tf.*Initializer that is used to initialize the variables.
      lengths: An optional Tensor that encodes a length for each item in the
        minibatch. This is used to truncate computation.
    Returns:
      A sequence with the result at each timestep.
    Raises:
      ValueError: if head is not a sequence, the shape is not rank 2 or the
        number of nodes (second dim) is not known.
    """
        # The per-step LSTM template is created once and reused afterwards.
        if not self.template:
            state_names = ['c', 'h']
            unbound_states = [
                prettytensor.UnboundVariable(state_name)
                for state_name in state_names
            ]
            # TODO(eiderman): Move this to pt.make_template() when it is ready.
            cell = prettytensor.template(
                'input', input_layer.bookkeeper).lstm_cell(
                    states=unbound_states,
                    num_units=num_units,
                    bias=bias,
                    peephole=peephole,
                    stddev=stddev,
                    init=init)
            self.template = cell.as_fn('input', *state_names)

        batch = input_layer.shape[0]
        # An LSTM tracks two equally-shaped states per step: cell and hidden.
        state_shapes = [[batch, num_units] for _ in range(2)]
        return unroll_state_saver(input_layer, name, state_shapes,
                                  self.template, lengths)
  def __call__(self,
               input_layer,
               num_units,
               bias=True,
               peephole=True,
               name=PROVIDED,
               stddev=None,
               init=None,
               lengths=None):
    """Processes sequence data with an unrolled LSTM.

    The initial state is drawn from the bookkeeper's recurrent state and if it
    supports state saving, then it is saved.

    Args:
      input_layer: PrettyTensor (provided).
      num_units: Number of nodes in the hidden states and the output size.
      bias: Whether or not to use a bias.
      peephole: Whether to use peephole connections.
      name: The name of this layer.
      stddev: Standard deviation for Gaussian initialization of parameters.
      init: A tf.*Initializer that is used to initialize the variables.
      lengths: An optional Tensor that encodes a length for each item in the
        minibatch. This is used to truncate computation.
    Returns:
      A sequence with the result at each timestep.
    Raises:
      ValueError: if head is not a sequence, the shape is not rank 2 or the
        number of nodes (second dim) is not known.
    """
    if not self.template:
      # TODO(eiderman): Move this to pt.make_template() when it is ready.
      template = prettytensor.template('input', input_layer.bookkeeper)
      names = ('c', 'h')
      cell = template.lstm_cell(
          states=[prettytensor.UnboundVariable(n) for n in names],
          num_units=num_units,
          bias=bias,
          peephole=peephole,
          stddev=stddev,
          init=init)
      self.template = cell.as_fn('input', *names)

    rows = input_layer.shape[0]
    # Cell state ('c') and hidden state ('h') share the same shape.
    c_shape = [rows, num_units]
    h_shape = [rows, num_units]
    return unroll_state_saver(input_layer, name, [c_shape, h_shape],
                              self.template, lengths)
      def __call__(self, input_layer, *args, **kwargs):
        """Lazily builds the cell template, then unrolls it over the input."""
        layer_name = kwargs.pop('name', 'sequence')
        lengths = kwargs.pop('lengths', None)
        # state_fn is expected to yield (state_name, state_size) pairs
        # describing each recurrent state this cell carries.
        state_tuples = state_fn(input_layer, *args, **kwargs)
        if not self.template:
          state_names = [pair[0] for pair in state_tuples]
          base = prettytensor.template('input', input_layer.bookkeeper)
          unbound = [prettytensor.UnboundVariable(n) for n in state_names]
          self.template = cell_fn(base, unbound, *args, **kwargs).as_fn(
              'input', *state_names)

        batch_size = input_layer.shape[0]
        shapes = [[batch_size, pair[1]] for pair in state_tuples]
        return unroll_state_saver(
            input_layer, layer_name, shapes, self.template, lengths)
# Example #5 (score: 0)
            def __call__(self, input_layer, *args, **kwargs):
                """Instantiates the recurrent template on first use and unrolls it."""
                layer_name = kwargs.pop('name', 'sequence')
                lengths = kwargs.pop('lengths', None)
                # Each entry is (state_name, state_size) — presumably one per
                # recurrent state of the cell; see state_fn's contract.
                state_tuples = state_fn(input_layer, *args, **kwargs)
                if not self.template:
                    base = prettytensor.template('input',
                                                 input_layer.bookkeeper)
                    unbound = [
                        prettytensor.UnboundVariable(pair[0])
                        for pair in state_tuples
                    ]
                    cell = cell_fn(base, unbound, *args, **kwargs)
                    self.template = cell.as_fn(
                        'input', *(pair[0] for pair in state_tuples))

                rows = input_layer.shape[0]
                shapes = [[rows, pair[1]] for pair in state_tuples]
                return unroll_state_saver(input_layer, layer_name, shapes,
                                          self.template, lengths)
    def __call__(self,
                 input_layer,
                 num_units,
                 bias=True,
                 name=PROVIDED,
                 stddev=None,
                 init=None,
                 lengths=None):
        """Processes sequence data with an unrolled GRU.

    The initial state is drawn from the bookkeeper's recurrent state and if it
    supports state saving, then it is saved.

    Args:
      input_layer: PrettyTensor (provided).
      num_units: Number of units in the hidden states.
      bias: Whether or not to use a bias.
      name: The name of this layer.
      stddev: Standard deviation for Gaussian initialization of parameters.
      init: A tf.*Initializer that is used to initialize the variables.
      lengths: An optional Tensor that encodes a length for each item in the
        minibatch. This is used to truncate computation.
    Returns:
      A sequence with the result at each timestep.
    Raises:
      ValueError: if head is not a sequence, the shape is not rank 2 or the
        number of nodes (second dim) is not known.
    """
        # Build the per-step template exactly once; later calls reuse it.
        if not self.template:
            state_var = prettytensor.UnboundVariable('state')
            self.template = (
                prettytensor.template('input', input_layer.bookkeeper)
                .gru_cell(state_var,
                          num_units,
                          bias=bias,
                          stddev=stddev,
                          init=init)
                .as_fn('input', 'state'))

        # Single hidden state of shape (batch, num_units).
        state_shape = (input_layer.shape[0], num_units)
        return unroll_state_saver(input_layer, name, [state_shape],
                                  self.template, lengths)