def reduce_sequence(self, inputs, sequence_lengths):
    """Reduces (concatenates) a list of variable-length sequences.

    Args:
      inputs: A list of rank-3 tensors to combine.  # assumed (batch, time, depth) — TODO confirm
      sequence_lengths: A list of length tensors, one per input.

    Returns:
      A tuple ``(reduced, combined_length)``.

    Raises:
      ValueError: if the (normalized) concatenation axis is not 1 or 2.
    """
    # Normalize a possibly-negative axis against the input rank.
    axis = self.axis % inputs[0].shape.ndims

    if axis == 2:
        # Depth-wise concatenation: pad every input to a common time
        # dimension, then delegate to the configured reduction.
        padded, combined_length = pad_n_with_identity(inputs, sequence_lengths)
        return self.reduce(padded), combined_length

    if axis == 1:
        # Time-wise concatenation: each output row is the rows of the
        # inputs placed back to back.
        combined_length = tf.add_n(sequence_lengths)
        maxlen = tf.reduce_max(combined_length)
        aligned = [align_in_time(x, maxlen) for x in inputs]

        # Zero out padding positions first: roll_sequence wraps values
        # around, so any non-zero padding would corrupt the sum below.
        masked = [
            x * tf.expand_dims(
                tf.sequence_mask(length, maxlen=maxlen, dtype=x.dtype), -1)
            for x, length in zip(aligned, sequence_lengths)
        ]

        # Shift each subsequent input past the content accumulated so far
        # and add it in; `offset` tracks the per-row write position.
        result = masked[0]
        offset = sequence_lengths[0]
        for tensor, length in zip(masked[1:], sequence_lengths[1:]):
            result += tensor_util.roll_sequence(tensor, offset)
            offset += length
        return result, combined_length

    raise ValueError(
        "Unsupported concatenation on axis {}".format(axis))
def testRollSequence(self):
    """Checks that roll_sequence rotates each row right by its offset."""
    offsets = [2, 3, 3]
    inputs = [[1, 2, 3, 0, 0, 6, 0],
              [1, 2, 3, 4, 0, 0, 0],
              [1, 0, 0, 0, 0, 0, 7]]
    # Each row of `inputs` rotated right by the matching offset, wrapping
    # trailing values around to the front.
    expected = [[6, 0, 1, 2, 3, 0, 0],
                [0, 0, 0, 1, 2, 3, 4],
                [0, 0, 7, 1, 0, 0, 0]]
    result = tensor_util.roll_sequence(inputs, offsets)
    self.assertAllEqual(expected, self.evaluate(result))