Example #1
import tensorflow as tf


def _add_conv_2d(model: tf.keras.Sequential, filters: int):
    # Strided 4x4 convolution that downsamples by a factor of two;
    # weights are drawn from N(0, 0.02), the usual DCGAN initialisation.
    model.add(tf.keras.layers.Conv2D(filters,
                                     (4, 4),
                                     strides=(2, 2),
                                     padding='same',
                                     use_bias=False,
                                     kernel_initializer=tf.random_normal_initializer(0.0, 0.02)))
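
For context, a minimal sketch of how such a helper is typically chained into a DCGAN-style downsampling stack. The input shape and filter progression below are illustrative assumptions, not taken from the source.

discriminator = tf.keras.Sequential()
# Hypothetical 64x64 RGB input; each _add_conv_2d call halves the spatial size.
discriminator.add(tf.keras.layers.InputLayer(input_shape=(64, 64, 3)))
for filters in (64, 128, 256):
    _add_conv_2d(discriminator, filters)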
Example #2
 def _add_conv_2d(self,
                  model: tf.keras.Sequential,
                  filters: int,
                  strides: Tuple[int, int] = DISCRIMINATOR_STRIDE_SIZE) -> None:
     # KERNEL_SIZE, PADDING, USE_BIAS and DISCRIMINATOR_STRIDE_SIZE are
     # module-level constants; Tuple comes from the typing module.
     model.add(
         tf.keras.layers.Conv2D(
             filters,
             KERNEL_SIZE,
             strides=strides,
             padding=PADDING,
             use_bias=USE_BIAS,
             kernel_initializer=tf.random_normal_initializer(0.0, 0.02)))
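
The upper-case names above are constants defined elsewhere in the source project. The values below are plausible assumptions (mirroring Example #1), shown only to make the snippet self-contained.

from typing import Tuple

# Assumed constant values; the real project may differ.
KERNEL_SIZE = (4, 4)
DISCRIMINATOR_STRIDE_SIZE = (2, 2)
PADDING = 'same'
USE_BIAS = False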
Example #3
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout, Flatten


def add_classifier(feature_extractor: tf.keras.Sequential, n_class: int,
                   dropout: float = 0.4) -> tf.keras.Sequential:
    """
    Add a classification head to a feature extraction model
    :param feature_extractor: model to extract features
    :param n_class: number of classes to differentiate
    :param dropout: dropout rate applied before the output layer
    :return: classifier model
    """
    feature_extractor.add(Flatten())
    feature_extractor.add(Dense(500, activation='relu'))
    feature_extractor.add(Dropout(dropout))
    feature_extractor.add(Dense(n_class, activation='softmax'))
    return feature_extractor
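
A minimal usage sketch; the convolutional base is an illustrative stand-in, not code from the source.

from tensorflow.keras.layers import Conv2D, MaxPooling2D

# Hypothetical feature extractor for 28x28 grayscale inputs.
base = tf.keras.Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    MaxPooling2D(),
])
classifier = add_classifier(base, n_class=10, dropout=0.4)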
Example #4
 def _add_activation(self, model: tf.keras.Sequential) -> None:
     model.add(tf.keras.layers.LeakyReLU(alpha=0.2))
Example #5
 def _add_zeros_padding(self, model: tf.keras.Sequential) -> None:
     model.add(tf.keras.layers.ZeroPadding2D())
Example #6
 def _add_dropout(self, model: tf.keras.Sequential) -> None:
     if ADD_DROPOUT_D:
         model.add(tf.keras.layers.Dropout(rate=DROPOUT_D_RATE))
Example #7
 def _add_batch_norm(self, model: tf.keras.Sequential) -> None:
     model.add(tf.keras.layers.BatchNormalization(
         momentum=0.8,
         gamma_initializer=tf.random_normal_initializer(1.0, 0.02)))
Example #8
 def _add_last_activation(self, model: tf.keras.Sequential) -> None:
     model.add(tf.keras.layers.Activation('tanh'))
Example #9
 def _add_upsampling(self, model: tf.keras.Sequential) -> None:
     if UPSAMPLING:
         model.add(tf.keras.layers.UpSampling2D(
             interpolation=INTERPOLATION))
Example #10
def _add_dropout(model: tf.keras.Sequential):
    model.add(tf.keras.layers.Dropout(rate=0.5))
Example #11
def _add_discriminator_activation(model: tf.keras.Sequential):
    model.add(tf.keras.layers.LeakyReLU(alpha=0.2))
Example #12
def _add_upsampling(model: tf.keras.Sequential):
    model.add(tf.keras.layers.UpSampling2D())
Example #13
def _add_generator_activation(model: tf.keras.Sequential):
    model.add(tf.keras.layers.ReLU())
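
Examples #10 to #13 are small free-standing helpers. As a sketch, the generator-side ones might compose into an upsampling block like this; the kernel size, filter count, and ordering are assumptions in the usual DCGAN style, not taken from the source.

def _add_generator_block(model: tf.keras.Sequential, filters: int):
    # Hypothetical composition of the helpers above: upsample, convolve, activate.
    _add_upsampling(model)  # doubles the spatial size
    model.add(tf.keras.layers.Conv2D(filters, (3, 3), padding='same'))
    _add_generator_activation(model)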
Example #14
    def add_dense(network: tf.keras.Sequential,
                  layer_dims: List[int],
                  input_shape=None,
                  activation="relu",
                  first_l1: float = 0.0,
                  first_l2: float = 0.0,
                  p_dropout: Optional[float] = None,
                  *args,
                  **kwargs):
        """
        Build a dense model with the given hidden layer dimensions
        :param network: sequential Keras network, modified in place
        :param layer_dims: list of hidden layer dimensions
        :param input_shape: shape of the input to the first layer, if known
        :param activation: activation function applied after each dense layer
        :param first_l1: L1 kernel regulariser on the first layer
        :param first_l2: L2 kernel regulariser on the first layer
        :param p_dropout: dropout rate applied after the first layer
        :param args: passed to the Keras dense layers
        :param kwargs: passed to the Keras dense layers
        """

        # First layer, with optional input shape and L1/L2 regularisation
        if input_shape:
            network.add(
                tf.keras.layers.Dense(
                    layer_dims[0],
                    input_shape=input_shape,
                    kernel_regularizer=tf.keras.regularizers.L1L2(l1=first_l1,
                                                                  l2=first_l2),
                    bias_regularizer=tf.keras.regularizers.L1L2(l1=first_l1,
                                                                l2=first_l2),
                ))
        else:
            network.add(
                tf.keras.layers.Dense(
                    layer_dims[0],
                    kernel_regularizer=tf.keras.regularizers.L1L2(l1=first_l1,
                                                                  l2=first_l2),
                    bias_regularizer=tf.keras.regularizers.L1L2(l1=first_l1,
                                                                l2=first_l2),
                ))
        network.add(tf.keras.layers.Activation(activation))
        if p_dropout:
            network.add(tf.keras.layers.Dropout(p_dropout))
        # All the other layers
        for cur_dim in layer_dims[1:]:
            network.add(tf.keras.layers.Dense(cur_dim, *args, **kwargs))
            network.add(tf.keras.layers.Activation(activation))
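
A usage sketch for add_dense, assuming it is reachable as a plain function or static method; the layer sizes and regularisation strengths are illustrative.

net = tf.keras.Sequential()
add_dense(net,
          layer_dims=[128, 64, 32],
          input_shape=(100,),
          first_l2=1e-4,
          p_dropout=0.2)
net.add(tf.keras.layers.Dense(1, activation='sigmoid'))  # hypothetical task head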
Example #15
    def add_symmetric_autoencoder(network: tf.keras.Sequential,
                                  layer_dims: List[int],
                                  input_shape=None,
                                  activation="relu",
                                  *args,
                                  **kwargs) -> tf.keras.Sequential:
        """
        Build an autoencoder whose encoder and decoder share the same hidden layer dimensions
        :param network: sequential Keras network
        :param layer_dims: list of hidden layer dimensions, from input to bottleneck
        :param input_shape: shape of the input to the first layer, if known
        :param activation: activation function applied after each dense layer
        :param args: passed to the Keras dense layers
        :param kwargs: passed to the Keras dense layers
        :return: sequential Keras autoencoder model
        """

        # First layer
        network.add(
            tf.keras.layers.Dense(layer_dims[0], input_shape=input_shape))
        network.add(tf.keras.layers.Activation(activation))
        # Encoder: narrow down to the bottleneck dimension
        for cur_dim in layer_dims[1:]:
            network.add(tf.keras.layers.Dense(cur_dim, *args, **kwargs))
            network.add(tf.keras.layers.Activation(activation))
        # Decoder: mirror the encoder dimensions back out
        for cur_dim in reversed(layer_dims[:-1]):
            network.add(tf.keras.layers.Dense(cur_dim, *args, **kwargs))
            network.add(tf.keras.layers.Activation(activation))
        # Return the network, as the annotated return type and docstring promise
        return network
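
A sketch of how the autoencoder builder might be called; the dimensions and training configuration are assumptions. With layer_dims=[784, 128, 32] the decoder mirrors back to 784 units, so the output matches the input size for reconstruction.

ae = add_symmetric_autoencoder(tf.keras.Sequential(),
                               layer_dims=[784, 128, 32],
                               input_shape=(784,))
ae.compile(optimizer='adam', loss='mse')  # reconstruct the input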