Example #1
  def __init__(self,
               feature_columns,
               trainable=True,
               name=None,
               **kwargs):
    """Creates a DenseFeatures object.

    Args:
      feature_columns: An iterable containing the FeatureColumns to use as
        inputs to your model. All items should be instances of classes derived
        from `DenseColumn` such as `numeric_column`, `embedding_column`,
        `bucketized_column`, `indicator_column`. If you have categorical
        features, you can wrap them with an `embedding_column` or
        `indicator_column`.
      trainable:  Boolean, whether the layer's variables will be updated via
        gradient descent during training.
      name: Name to give to the DenseFeatures.
      **kwargs: Keyword arguments to construct a layer.

    Raises:
      ValueError: if an item in `feature_columns` is not a `DenseColumn`.
    """
    super(DenseFeatures, self).__init__(
        feature_columns=feature_columns,
        trainable=trainable,
        name=name,
        **kwargs)
    self._state_manager = fc._StateManagerImplV2(self, self.trainable)  # pylint: disable=protected-access
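A minimal usage sketch for a layer like this, using the public tf.keras.layers.DenseFeatures wrapper and a hypothetical numeric column (the column name and values below are illustrative, not taken from the original source):

import tensorflow as tf

# Hypothetical column; any DenseColumn (numeric, embedding, indicator, ...) works here.
price = tf.feature_column.numeric_column("price")
layer = tf.keras.layers.DenseFeatures([price])

# The layer maps a dict of raw feature tensors to a single dense tensor.
features = {"price": tf.constant([[1.0], [2.5]])}
dense_tensor = layer(features)  # shape (2, 1)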
Example #2
 def __init__(self,
              units: int,
              linear_columns: List[FeatureColumn],
              dnn_columns: List[DenseColumn],
              hidden_units: List[int],
              dropout: float,
              name=None,
              **kwargs):
     super(WideDeepModel, self).__init__(name=name, **kwargs)
     self.units = units
     self.linear_columns = sorted(linear_columns,
                                  key=lambda column: column.name)
     self.dnn_columns = sorted(dnn_columns, key=lambda column: column.name)
     self.hidden_units = hidden_units
     self.dropout = dropout
     self.state_manager = _StateManagerImplV2(self,
                                              trainable=self.trainable)
     self.dense_layers = []
     self.dropout_layers = []
     for hidden_unit in hidden_units:
         dense_layer = tf.keras.layers.Dense(
             units=hidden_unit, activation=tf.keras.activations.relu)
         self.dense_layers.append(dense_layer)
         if dropout > 0:
             dropout_layer = tf.keras.layers.Dropout(rate=dropout)
             self.dropout_layers.append(dropout_layer)
     self.logits_layer = tf.keras.layers.Dense(units=units, activation=None)
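An instantiation sketch based on the signature above. The feature columns are hypothetical placeholders; whether the wide (linear_columns) side needs wrapping, e.g. in indicator_column, depends on this model's call() implementation, which is not shown here:

# Assumes: import tensorflow as tf
age = tf.feature_column.numeric_column("age")
occupation = tf.feature_column.categorical_column_with_hash_bucket("occupation", hash_bucket_size=100)

model = WideDeepModel(
    units=1,
    linear_columns=[tf.feature_column.indicator_column(occupation)],
    dnn_columns=[age, tf.feature_column.embedding_column(occupation, dimension=8)],
    hidden_units=[128, 64],
    dropout=0.3)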
Example #3
    def __init__(self,
                 output_dim: int,
                 feature_columns: List[DenseColumn],
                 hidden_units: List[int],
                 activation_fn,
                 dropout: float,
                 batch_norm: bool = False,
                 name=None,
                 **kwargs):
        super(DNNModel, self).__init__(name=name, **kwargs)
        self.feature_columns = sorted(feature_columns, key=lambda column: column.name)
        self.output_dim = output_dim
        self.hidden_units = hidden_units
        self.activation_fn = activation_fn
        self.dropout = dropout
        self.batch_norm = batch_norm
        self.state_manager = _StateManagerImplV2(self, trainable=True)

        self.dense_layers = []
        self.dropout_layers = []
        self.batch_norm_layers = []
        for index, hidden_unit in enumerate(hidden_units):
            dense_layer = tf.keras.layers.Dense(units=hidden_unit, activation=activation_fn,
                                                name="dense_layer_%d" % index)
            self.dense_layers.append(dense_layer)
            if dropout:
                dropout_layer = tf.keras.layers.Dropout(rate=dropout, name="dropout_layer_%d" % index)
                self.dropout_layers.append(dropout_layer)
            if batch_norm:
                batch_norm_layer = tf.keras.layers.BatchNormalization(momentum=0.999,
                                                                      name="batch_norm_layer_%d" % index)
                self.batch_norm_layers.append(batch_norm_layer)

        self.logits_layer = tf.keras.layers.Dense(units=output_dim, activation=None, name="logits_layer")
Example #4
 def __init__(self,
              output_dim: int,
              embedding_size: int,
              feature_columns: List[FeatureColumn],
              product_type: str,
              hidden_units: List[int],
              dropout: float,
              name=None,
              **kwargs):
     super(PNNModel, self).__init__(name=name, **kwargs)
     self.output_dim = output_dim
     self.embedding_size = embedding_size
     self.feature_columns = sorted(feature_columns,
                                   key=lambda column: column.name)
     self.product_type = product_type
     self.hidden_units = hidden_units
     self.dropout = dropout
     self.state_manager = _StateManagerImplV2(self,
                                              trainable=self.trainable)
     self.product_layer = None
     self.product_bias = None
     self.dense_layers = []
     self.dropout_layers = []
     for hidden_unit in hidden_units:
         dense_layer = tf.keras.layers.Dense(
             units=hidden_unit, activation=tf.keras.activations.relu)
         self.dense_layers.append(dense_layer)
         if dropout > 0:
             dropout_layer = tf.keras.layers.Dropout(rate=dropout)
             self.dropout_layers.append(dropout_layer)
     self.score_layer = tf.keras.layers.Dense(units=output_dim,
                                              activation=None)
Example #5
 def __init__(self,
              latent_dim: int,
              columns: List[CategoricalColumn],
              name=None,
              **kwargs):
     super(FMModel, self).__init__(name=name, **kwargs)
     self.latent_dim = latent_dim
     self.columns = sorted(columns, key=lambda col: col.name)
     self.state_manager = _StateManagerImplV2(self, True)
     self.bias = None
Example #6
 def __init__(self, latent_dim: int, user_column: CategoricalColumn,
              item_column: CategoricalColumn, l2_factor: float, name=None, **kwargs):
     super(SVDModel, self).__init__(name=name, **kwargs)
     self.average_score = None
     self.latent_dim = latent_dim
     self.user_column = user_column
     self.item_column = item_column
     self.l2_factor = l2_factor
     self.regularizer = tf.keras.regularizers.l2(l2_factor)
     self.state_manager = _StateManagerImplV2(self, self.trainable)
     self.user_bias = None
     self.item_bias = None
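A hypothetical instantiation for a rating-prediction setup; the column names, vocabulary sizes, and l2_factor below are placeholders, not values from the original code:

# Assumes: import tensorflow as tf
user_col = tf.feature_column.categorical_column_with_identity("user_id", num_buckets=10000)
item_col = tf.feature_column.categorical_column_with_identity("item_id", num_buckets=5000)

model = SVDModel(latent_dim=32,
                 user_column=user_col,
                 item_column=item_col,
                 l2_factor=1e-4)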
Example #7
 def __init__(self,
              columns: List[FeatureColumn],
              cross_columns: List[CrossedColumn],
              l2_factor: float,
              name=None,
              **kwargs):
     super(Poly2Model, self).__init__(name=name, **kwargs)
     self.columns = sorted(columns, key=lambda col: col.name)
     self.l2_factor = l2_factor
     self.regularizer = tf.keras.regularizers.l2(l2_factor)
     self.cross_columns = sorted(cross_columns, key=lambda col: col.name)
     self.state_manager = _StateManagerImplV2(self, self.trainable)
     self.bias = None
Example #8
 def __init__(self,
              item_embedding_size: int,
              item_id_column: CategoricalColumn,
              item_feature_columns: List[CategoricalColumn],
              target_id_column: CategoricalColumn,
              name=None,
              **kwargs):
     super(EgesModel, self).__init__(name=name, **kwargs)
     self.item_embedding_size = item_embedding_size
     self.item_id_column = item_id_column
     self.item_feature_columns = item_feature_columns
     self.target_id_column = target_id_column
     self.state_manager = _StateManagerImplV2(self, self.trainable)
Example #9
 def __init__(self,
              split_count: int,
              columns: List[FeatureColumn],
              l2_factor: float,
              name=None,
              **kwargs):
     super(MLRModel, self).__init__(name=name, **kwargs)
     self.split_count = split_count
     self.columns = columns
     self.l2_factor = l2_factor
     self.regularizer = tf.keras.regularizers.l2(l2_factor)
     self.state_manager = _StateManagerImplV2(self, trainable=True)
     self.softmax_bias = None
     self.sigmoid_bias = None
Example #10
 def __init__(self,
              columns: List[FeatureColumn],
              l2_factor: float,
              trainable=True,
              name=None,
              **kwargs):
     super(LinearLayer, self).__init__(trainable=trainable,
                                       name=name,
                                       **kwargs)
     self.columns = sorted(columns, key=lambda col: col.name)
     self.l2_factor = l2_factor
     self.regularizer = tf.keras.regularizers.l2(l2_factor)
     self.state_manager = _StateManagerImplV2(self, self.trainable)
     self.bias = None
Example #11
 def __init__(self,
              item_embedding_size: int,
              item_id_column: CategoricalColumn,
              item_feature_columns: List[CategoricalColumn],
              trainable=True,
              name=None,
              **kwargs):
     super(ItemWeightedFeatureLayer, self).__init__(trainable=trainable,
                                                    name=name,
                                                    **kwargs)
     self.item_embedding_size = item_embedding_size
     self.item_id_column = item_id_column
     self.item_feature_columns = item_feature_columns
     self.state_manager = _StateManagerImplV2(self, self.trainable)
     self.feature_weights = None
Example #12
 def __init__(self, average_score: float, latent_dim: int, user_column: CategoricalColumn,
              item_column: CategoricalColumn, user_history_columns: List[CategoricalColumn],
              l2_factor_bias: float, l2_factor_embedding: float, name=None, **kwargs):
     super(SVDPlusPlusModel, self).__init__(name=name, **kwargs)
     self.average_score = average_score
     self.latent_dim = latent_dim
     self.user_column = user_column
     self.item_column = item_column
     self.user_history_columns = user_history_columns
     self.l2_factor_bias = l2_factor_bias
     self.l2_factor_embedding = l2_factor_embedding
     self.regularizer_bias = tf.keras.regularizers.l2(l2_factor_bias)
     self.regularizer_embedding = tf.keras.regularizers.l2(l2_factor_embedding)
     self.state_manager = _StateManagerImplV2(self, True)
     self.user_bias = None
     self.item_bias = None
Example #13
 def __init__(self,
              output_dim: int,
              feature_columns: List[DenseColumn],
              residual_units: List[int],
              name=None,
              **kwargs):
     super(DeepCrossingModel, self).__init__(name=name, **kwargs)
     self.output_dim = output_dim
     self.feature_columns = sorted(feature_columns,
                                   key=lambda column: column.name)
     self.residual_units = residual_units
     self.stack_layer = tf.keras.layers.Lambda(
         lambda tensors: tf.concat(tensors, axis=1), name="StackLayer")
     self.residual_layer = ResidualLayer(hidden_units=residual_units)
     self.score_layer = tf.keras.layers.Dense(units=output_dim,
                                              name="ScoreLayer")
     self.state_manager = _StateManagerImplV2(self,
                                              trainable=self.trainable)
Example #14
 def __init__(self, output_dim: int, user_columns: List[DenseColumn], item_columns: List[DenseColumn],
              hidden_units: List[int], dropout: float, name=None, **kwargs):
     super(NeuralCfModel, self).__init__(name=name, **kwargs)
     self.output_dim = output_dim
     self.user_columns = sorted(user_columns, key=lambda column: column.name)
     self.item_columns = sorted(item_columns, key=lambda column: column.name)
     self.hidden_units = hidden_units
     self.dropout = dropout
     self.state_manager = _StateManagerImplV2(self, trainable=self.trainable)
     self.gmf_layer = tf.keras.layers.Lambda(lambda inputs: tf.math.multiply(inputs[0], inputs[1]), name="GMFLayer")
     self.denser_layers = []
     self.dropout_layers = []
     for hidden_unit in self.hidden_units:
         dense_layer = tf.keras.layers.Dense(units=hidden_unit, activation=tf.keras.activations.relu)
         self.denser_layers.append(dense_layer)
         if self.dropout > 0:
             dropout_layer = tf.keras.layers.Dropout(rate=dropout)
             self.dropout_layers.append(dropout_layer)
     self.score_layer = tf.keras.layers.Dense(units=output_dim, activation=None, name="ScoreLayer")
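As with the sketches above, a hypothetical instantiation with embedding columns for the user and item IDs (all names and sizes are illustrative):

# Assumes: import tensorflow as tf
user_id = tf.feature_column.categorical_column_with_identity("user_id", num_buckets=10000)
item_id = tf.feature_column.categorical_column_with_identity("item_id", num_buckets=5000)

model = NeuralCfModel(
    output_dim=1,
    user_columns=[tf.feature_column.embedding_column(user_id, dimension=16)],
    item_columns=[tf.feature_column.embedding_column(item_id, dimension=16)],
    hidden_units=[64, 32],
    dropout=0.2)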