Example #1
    def __call__(self, inputs, sequence_lengths, scope=None):
        '''
        Create the variables and do the forward computation

        Args:
            inputs: the input to the layer as a
                [batch_size, max_length, dim] tensor
            sequence_lengths: the length of the input sequences as a
                [batch_size] tensor
            scope: The variable scope sets the namespace under which
                the variables created during this call will be stored.

        Returns:
            the output of the layer and the sequence lengths of the outputs
        '''

        with tf.variable_scope(scope or type(self).__name__):

            #apply blstm layer
            outputs = self.blstm(inputs, sequence_lengths)
            #stack the outputs to reduce the time resolution
            stacked_outputs, output_seq_lengths = ops.pyramid_stack(
                outputs,
                sequence_lengths,
                self.num_steps)

        return stacked_outputs, output_seq_lengths
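All three examples delegate the time reduction to ops.pyramid_stack, which is not shown here. The snippet below is a minimal sketch of the idea under the usual pyramidal scheme: pad the time axis, fold groups of num_steps frames into the feature dimension, and shrink the sequence lengths accordingly. The name pyramid_stack_sketch and its details are assumptions, not the library's implementation.

import tensorflow as tf

def pyramid_stack_sketch(inputs, sequence_lengths, num_steps=2):
    '''
    sketch of pyramidal stacking: concatenate num_steps adjacent timesteps

    args:
        inputs: a [batch_size, max_length, dim] tensor
        sequence_lengths: the sequence lengths as a [batch_size] tensor
        num_steps: the number of timesteps to concatenate

    returns:
        - a [batch_size, ceil(max_length/num_steps), dim*num_steps] tensor
        - the reduced sequence lengths as a [batch_size] tensor
    '''
    shape = tf.shape(inputs)
    batch_size, max_length = shape[0], shape[1]
    dim = int(inputs.get_shape()[2])

    #pad the time axis so its length is divisible by num_steps
    pad = (num_steps - max_length % num_steps) % num_steps
    padded = tf.pad(inputs, [[0, 0], [0, pad], [0, 0]])

    #fold every group of num_steps frames into the feature dimension
    stacked = tf.reshape(
        padded,
        [batch_size, (max_length + pad) // num_steps, dim*num_steps])

    #every output step covers num_steps input steps
    output_seq_lengths = tf.cast(
        tf.ceil(tf.cast(sequence_lengths, tf.float32) / num_steps), tf.int32)

    return stacked, output_seq_lengths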
Example #2
def projected_subsampling(inputs, input_seq_lengths, num_steps, name=None):
    '''
    apply projected subsampling: concatenate num_steps timesteps and
    project the result back to the original dimensionality

    args:
        inputs: a [batch_size x max_length x dim] input tensor
        input_seq_lengths: the input sequence lengths as a [batch_size] vector
        num_steps: the number of timesteps to concatenate
        name: the name of the operation

    returns:
        - a [batch_size x ceil(max_length/num_steps) x dim] output tensor
        - the output sequence lengths as a [batch_size] vector
    '''

    with tf.variable_scope(name or 'subsampling'):
        input_dim = int(inputs.get_shape()[2])

        #concatenate 2 timesteps
        stacked_inputs, output_seq_lengths = ops.pyramid_stack(
            inputs,
            input_seq_lengths,
            num_steps)

        #project back to the input dimension
        outputs = tf.contrib.layers.linear(stacked_inputs, input_dim)

        return outputs, output_seq_lengths
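A minimal usage sketch for projected_subsampling; the placeholder names and shapes are hypothetical, and the ops module used inside the function is assumed to be importable.

import tensorflow as tf

#hypothetical batch: 16 utterances with at most 200 frames of 40-dim features
features = tf.placeholder(tf.float32, shape=[16, 200, 40])
feature_lengths = tf.placeholder(tf.int32, shape=[16])

#concatenate pairs of frames and project back to 40 dimensions,
#roughly halving the number of time steps
subsampled, subsampled_lengths = projected_subsampling(
    features, feature_lengths, num_steps=2, name='subsample1')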
Example #3
def pblstm(inputs,
           sequence_length,
           is_training,
           num_units,
           num_steps=2,
           scope=None):
    '''
    a Pyramidal BLSTM layer

    args:
        inputs: the input to the layer as a
            [batch_size, max_length, dim] tensor
        sequence_length: the length of the input sequences as a
            [batch_size] tensor
        num_units: The number of units in one direction
        num_steps: the number of time steps to concatenate
        is_training: whether or not the network is in training mode
        scope: The variable scope sets the namespace under which
            the variables created during this call will be stored.

    returns:
        - the PBLSTM outputs
        - the new sequence lengths
    '''

    with tf.variable_scope(scope or 'PBLSTM'):
        #apply blstm layer
        outputs = blstm(inputs=inputs,
                        sequence_length=sequence_length,
                        num_units=num_units,
                        is_training=is_training)

        #stack the outputs
        outputs, output_seq_lengths = ops.pyramid_stack(
            outputs, sequence_length, num_steps)

        return outputs, output_seq_lengths
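A sketch of how pblstm might be stacked into a pyramidal encoder that halves the number of time steps at every layer; pyramidal_encoder and its hyperparameters are illustrative assumptions, and the blstm and ops helpers used above are assumed to be in scope.

import tensorflow as tf

#hypothetical encoder that stacks several pblstm layers, halving the
#number of time steps at every layer
def pyramidal_encoder(features, feature_lengths, is_training,
                      num_units=128, num_layers=3):
    outputs, lengths = features, feature_lengths
    for l in range(num_layers):
        outputs, lengths = pblstm(
            inputs=outputs,
            sequence_length=lengths,
            is_training=is_training,
            num_units=num_units,
            num_steps=2,
            scope='pblstm%d' % l)
    return outputs, lengths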