Example #1
 # Assumed imports: from tensorflow.keras import regularizers
 # (the enclosing class is expected to subclass tf.keras.layers.Layer)
 def __init__(self, l1=0., l2=0., **kwargs):
     # Attach an L1L2 penalty to this layer's output activations.
     super(ActivityRegularization, self).__init__(
         activity_regularizer=regularizers.L1L2(l1=l1, l2=l2), **kwargs)
     self.supports_masking = True
     # Store the raw coefficients on the instance.
     self.l1 = l1
     self.l2 = l2
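
This is the same pattern used by Keras's built-in ActivityRegularization layer, so the penalty can also be applied without defining a custom class. A minimal usage sketch; the layer sizes and coefficients below are illustrative, not taken from the example above:

import tensorflow as tf

# ActivityRegularization penalizes the *outputs* of the preceding layer;
# the penalty is added to the training loss automatically.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation="relu", input_shape=(8,)),
    tf.keras.layers.ActivityRegularization(l1=0.01, l2=0.01),
    tf.keras.layers.Dense(1),
])
model.compile(optimizer="adam", loss="mse")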
Example #2
 # Assumed imports: import tensorflow as tf
 #                  from tensorflow.keras import regularizers
 def kernel(self, train_columns_count):
     # Build a small fully connected regression network.
     regression = tf.keras.Sequential([
         tf.keras.layers.Dense(self.dense_num_list[0], activation=self.ACTIVATION,
                               input_shape=[train_columns_count, ]),
         tf.keras.layers.Dropout(0.5),
         # Dense has no `regularizers` argument; the L1L2 weight penalty
         # must be passed as `kernel_regularizer`.
         tf.keras.layers.Dense(self.dense_num_list[1],
                               activation=self.ACTIVATION,
                               kernel_regularizer=regularizers.L1L2(0.1, 0.1)),
         tf.keras.layers.Dropout(0.5),
         # Single linear output unit for regression.
         tf.keras.layers.Dense(1, activation="linear")
     ])
     optimizer = self.get_optimizer()
     regression.compile(loss=self.loss_class(), optimizer=optimizer, metrics=['mae', 'mse'])
     return regression
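
For reference, a self-contained sketch of the same model outside the class; the input width, layer sizes, optimizer, and loss below are assumptions, since the original's dense_num_list, ACTIVATION, get_optimizer, and loss_class are not shown:

import numpy as np
import tensorflow as tf
from tensorflow.keras import regularizers

# Standalone equivalent: L1L2 penalizes the hidden layer's kernel weights,
# and the penalty term is included in the reported training loss.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation="relu", input_shape=[10, ]),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(32, activation="relu",
                          kernel_regularizer=regularizers.L1L2(l1=0.1, l2=0.1)),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(1, activation="linear"),
])
model.compile(loss="mse", optimizer="adam", metrics=["mae", "mse"])

# Smoke test on random data (illustrative only).
x = np.random.rand(256, 10).astype("float32")
y = np.random.rand(256, 1).astype("float32")
model.fit(x, y, epochs=2, batch_size=32, verbose=0)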