def _prepare_sparse_dense_dropout_layers(
    self, name: Text, drop_rate: float
) -> None:
    """Creates dropout layers for sparse inputs and for densified inputs."""
    self._tf_layers[f"sparse_input_dropout.{name}"] = layers.SparseDropout(
        rate=drop_rate
    )
    self._tf_layers[f"dense_input_dropout.{name}"] = tf.keras.layers.Dropout(
        rate=drop_rate
    )
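# `layers.SparseDropout` above is a project-specific layer, not part of
# tf.keras. Below is a minimal sketch of what such a layer might look like,
# assuming standard inverted dropout applied to the stored values of a
# tf.SparseTensor; the class body is illustrative, not the project's actual
# implementation.
import tensorflow as tf

class SparseDropout(tf.keras.layers.Layer):
    """Dropout over the stored (non-zero) values of a tf.SparseTensor."""

    def __init__(self, rate: float = 0.5, **kwargs) -> None:
        super().__init__(**kwargs)
        self.rate = rate

    def call(
        self, inputs: tf.SparseTensor, training: bool = False
    ) -> tf.SparseTensor:
        if not training or self.rate == 0.0:
            return inputs
        # Keep each stored value with probability (1 - rate) and rescale
        # the survivors, as in standard inverted dropout.
        to_keep = tf.random.uniform(tf.shape(inputs.values)) >= self.rate
        retained = tf.sparse.retain(inputs, to_keep)
        return tf.SparseTensor(
            retained.indices,
            retained.values / (1.0 - self.rate),
            retained.dense_shape,
        )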
def __init__(
    self,
    dense_dim: List[int],
    model_dim: int,
    reg_lambda: float,
    drop_rate: float,
) -> None:
    super().__init__()
    # One fully connected hidden layer per requested dimension.
    self.dense_layers = [
        tf.keras.layers.Dense(i, activation='relu') for i in dense_dim
    ]
    # Dropout applied directly to the values of sparse input tensors.
    self.sparse_dropout_layer = layers.SparseDropout(drop_rate)
    # Projects sparse inputs into a dense space of the first hidden dimension.
    self.sparse_to_dense_layer = layers.DenseForSparse(
        units=dense_dim[0], reg_lambda=reg_lambda
    )
    self.output_layer = tf.keras.layers.Dense(model_dim, activation='relu')
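# `layers.DenseForSparse` is likewise project-specific. A plausible sketch,
# assuming it behaves like tf.keras.layers.Dense but consumes a
# tf.SparseTensor via tf.sparse.sparse_dense_matmul, and that `reg_lambda`
# feeds an L2 kernel regularizer (both assumptions, inferred from the usage
# above):
import tensorflow as tf

class DenseForSparse(tf.keras.layers.Dense):
    """Dense layer that accepts a tf.SparseTensor as input."""

    def __init__(self, reg_lambda: float = 0.0, **kwargs) -> None:
        regularizer = (
            tf.keras.regularizers.l2(reg_lambda) if reg_lambda > 0 else None
        )
        super().__init__(kernel_regularizer=regularizer, **kwargs)

    def call(self, inputs: tf.SparseTensor) -> tf.Tensor:
        if not isinstance(inputs, tf.SparseTensor):
            raise ValueError("Input tensor must be a tf.SparseTensor.")
        # Multiply the sparse input with the dense kernel without first
        # materialising the input as a dense tensor.
        outputs = tf.sparse.sparse_dense_matmul(inputs, self.kernel)
        if self.use_bias:
            outputs = tf.nn.bias_add(outputs, self.bias)
        if self.activation is not None:
            outputs = self.activation(outputs)
        return outputs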
def _prepare_layers_for_sparse_tensors(
    self, attribute: Text, feature_type: Text, config: Dict[Text, Any]
) -> None:
    """Sets up sparse tensor pre-processing before combining with dense ones."""
    # For optionally applying dropout to sparse tensors
    if config[SPARSE_INPUT_DROPOUT]:
        self._tf_layers[self.SPARSE_DROPOUT] = layers.SparseDropout(
            rate=config[DROP_RATE]
        )

    # For converting sparse tensors to dense
    self._tf_layers[self.SPARSE_TO_DENSE] = layers.DenseForSparse(
        name=f"sparse_to_dense.{attribute}_{feature_type}",
        units=config[DENSE_DIMENSION][attribute],
        reg_lambda=config[REGULARIZATION_CONSTANT],
    )

    # For optionally applying dropout to the dense tensors produced from
    # sparse inputs
    if config[DENSE_INPUT_DROPOUT]:
        self._tf_layers[self.DENSE_DROPOUT] = tf.keras.layers.Dropout(
            rate=config[DROP_RATE]
        )
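# The method above only constructs the layers. A hypothetical counterpart at
# call time (the name `_process_sparse_feature` and its signature are
# illustrative, not from the source) would apply them in order: sparse
# dropout, densification, then dense dropout.
import tensorflow as tf

def _process_sparse_feature(
    self, feature: tf.SparseTensor, training: bool
) -> tf.Tensor:
    # Optionally drop values while the tensor is still sparse.
    if self.SPARSE_DROPOUT in self._tf_layers:
        feature = self._tf_layers[self.SPARSE_DROPOUT](
            feature, training=training
        )
    # Convert to a dense tensor sized by DENSE_DIMENSION[attribute].
    dense_feature = self._tf_layers[self.SPARSE_TO_DENSE](feature)
    # Optionally apply ordinary dropout to the densified tensor.
    if self.DENSE_DROPOUT in self._tf_layers:
        dense_feature = self._tf_layers[self.DENSE_DROPOUT](
            dense_feature, training=training
        )
    return dense_feature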