Example #1
0
    def call(self, features, training=None):
        """Transforms `features` into a `(sequence_input, sequence_length)` pair.

        Args:
          features: A dict mapping feature keys to tensors.
          training: Optional Python boolean indicating whether the layer runs in
            training mode. It is forwarded to any `FeatureColumn` whose
            `get_sequence_dense_tensor` accepts a `training` argument (for
            example, a column applying dropout). When `None`, falls back to
            `tf.keras.backend.learning_phase()`.

        Returns:
          An `(input_layer, sequence_length)` tuple:
          - input_layer: A float `Tensor` of shape `[batch_size, T, D]`, where
            `T` is the batch's maximum sequence length (may vary between
            batches) and `D` is the sum of `num_elements` over all
            `feature_columns`.
          - sequence_length: An int `Tensor` of shape `[batch_size]` with each
            example's sequence length.

        Raises:
          ValueError: If `features` is not a dictionary.
        """
        if not isinstance(features, dict):
            raise ValueError('We expected a dictionary here. Instead we got: ',
                             features)
        if training is None:
            training = backend.learning_phase()

        cache = fc.FeatureTransformationCache(features)
        dense_outputs = []
        lengths = []

        for column in self._feature_columns:
            with backend.name_scope(column.name):
                # Some columns' get_sequence_dense_tensor does not accept a
                # `training` kwarg; fall back to the plain call on TypeError.
                try:
                    dense, length = column.get_sequence_dense_tensor(
                        cache, self._state_manager, training=training)
                except TypeError:
                    dense, length = column.get_sequence_dense_tensor(
                        cache, self._state_manager)
                # Flatten the trailing dimension so every output is rank 3.
                dense_outputs.append(
                    self._process_dense_tensor(column, dense))
                lengths.append(length)

        # All columns must agree on the static batch size; then collapse the
        # per-column lengths into a single sequence-length tensor.
        fc._verify_static_batch_size_equality(lengths, self._feature_columns)
        sequence_length = _assert_all_equal_and_return(lengths)

        return self._verify_and_concat_tensors(dense_outputs), sequence_length
Example #2
0
    def call(self, features):
        """Transforms `features` into a `(sequence_input, sequence_length)` pair.

        Args:
          features: A dict mapping feature keys to tensors.

        Returns:
          An `(input_layer, sequence_length)` tuple:
          - input_layer: A float `Tensor` of shape `[batch_size, T, D]`, where
            `T` is the batch's maximum sequence length (may vary between
            batches) and `D` is the sum of `num_elements` over all
            `feature_columns`.
          - sequence_length: An int `Tensor` of shape `[batch_size]` with each
            example's sequence length.

        Raises:
          ValueError: If `features` is not a dictionary.
        """
        if not isinstance(features, dict):
            raise ValueError('We expected a dictionary here. Instead we got: ',
                             features)

        cache = fc.FeatureTransformationCache(features)
        dense_outputs = []
        lengths = []

        for column in self._feature_columns:
            with ops.name_scope(column.name):
                dense, length = column.get_sequence_dense_tensor(
                    cache, self._state_manager)
                # Flatten the trailing dimension so every output is rank 3.
                dense_outputs.append(
                    self._process_dense_tensor(column, dense))
                lengths.append(length)

        # All columns must agree on the static batch size; then collapse the
        # per-column lengths into a single sequence-length tensor.
        fc._verify_static_batch_size_equality(lengths, self._feature_columns)
        sequence_length = _assert_all_equal_and_return(lengths)

        return self._verify_and_concat_tensors(dense_outputs), sequence_length
  def call(self, features):
    """Transforms `features` into a `(sequence_input, sequence_length)` pair.

    Args:
      features: A dict mapping feature keys to tensors.

    Returns:
      An `(input_layer, sequence_length)` tuple:
      - input_layer: A float `Tensor` of shape `[batch_size, T, D]`, where `T`
        is the batch's maximum sequence length (may vary between batches) and
        `D` is the sum of `num_elements` over all `feature_columns`.
      - sequence_length: An int `Tensor` of shape `[batch_size]` with each
        example's sequence length.

    Raises:
      ValueError: If `features` is not a dictionary.
    """
    if not isinstance(features, dict):
      raise ValueError('We expected a dictionary here. Instead we got: ',
                       features)

    cache = fc.FeatureTransformationCache(features)
    dense_outputs = []
    lengths = []

    for column in self._feature_columns:
      with ops.name_scope(column.name):
        dense, length = column.get_sequence_dense_tensor(
            cache, self._state_manager)
        # Flatten the trailing dimension so every output is rank 3.
        dense_outputs.append(self._process_dense_tensor(column, dense))
        lengths.append(length)

    # All columns must agree on the static batch size; then collapse the
    # per-column lengths into a single sequence-length tensor.
    fc._verify_static_batch_size_equality(lengths, self._feature_columns)
    sequence_length = _assert_all_equal_and_return(lengths)

    return self._verify_and_concat_tensors(dense_outputs), sequence_length
 def _verify_and_concat_tensors(self, output_tensors):
     """Checks batch-size agreement across columns, then concats on last axis."""
     # pylint: disable=protected-access
     feature_column_v2._verify_static_batch_size_equality(
         output_tensors, self._feature_columns)
     concatenated = array_ops.concat(output_tensors, -1)
     return concatenated
Example #5
0
 def _verify_and_concat_tensors(self, output_tensors):
     """Checks batch-size agreement across columns, then concats on last axis."""
     fc._verify_static_batch_size_equality(
         output_tensors, self._feature_columns)
     concatenated = array_ops.concat(output_tensors, -1)
     return concatenated