Example #1
import tensorflow as tf

def batchnorm_activation_dropout(input_layer, batchnorm, activation_function,
                                 dropout_rate, is_training, name):
    """Appends optional batch norm, activation, and dropout layers after
    input_layer and returns the list of newly added layers, in order."""
    layers = [input_layer]

    # batchnorm
    if batchnorm:
        layers.append(
            tf.layers.batch_normalization(layers[-1],
                                          name="{}_batchnorm".format(name),
                                          training=is_training))

    # activation
    if activation_function:
        if isinstance(activation_function, str):
            # an activation given by name (e.g. "relu") is resolved through
            # the project's own helper module (not shown in this example)
            activation_function = utils.get_activation_function(
                activation_function)
        layers.append(
            activation_function(layers[-1], name="{}_activation".format(name)))

    # dropout
    if dropout_rate > 0:
        layers.append(
            tf.layers.dropout(layers[-1],
                              dropout_rate,
                              name="{}_dropout".format(name),
                              training=is_training))

    # remove the input layer from the layers list
    del layers[0]

    return layers
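
A minimal usage sketch for the block above. The tensor shapes, the `fc1` dense layer, and the placeholder names are hypothetical; TensorFlow 1.x is assumed, matching the `tf.layers` API the example uses:

import tensorflow as tf

inputs = tf.placeholder(tf.float32, shape=[None, 128], name="inputs")
# a boolean placeholder lets the same graph switch between training and inference
is_training = tf.placeholder(tf.bool, shape=(), name="is_training")

hidden = tf.layers.dense(inputs, 64, name="fc1")
post = batchnorm_activation_dropout(hidden,
                                    batchnorm=True,
                                    activation_function=tf.nn.relu,
                                    dropout_rate=0.1,
                                    is_training=is_training,
                                    name="fc1")
output = post[-1]  # the returned list is ordered, so the last entry is the block's output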
Example #2
import tensorflow as tf

def batchnorm_activation_dropout(input_layer, batchnorm, activation_function,
                                 dropout_rate, is_training, name):
    """Appends optional batch norm, activation, and dropout layers after
    input_layer and returns the list of newly added layers, in order."""
    layers = [input_layer]

    # Rationale: passing a Python bool here would fix the mode at graph-build
    # time, so batch norm and/or dropout could never be toggled between
    # training and inference at run time; require a boolean tensor instead
    assert not isinstance(is_training, bool)

    # batchnorm
    if batchnorm:
        layers.append(
            tf.layers.batch_normalization(layers[-1],
                                          name="{}_batchnorm".format(name),
                                          training=is_training))

    # activation
    if activation_function:
        if isinstance(activation_function, str):
            # an activation given by name (e.g. "relu") is resolved through
            # the project's own helper module (not shown in this example)
            activation_function = utils.get_activation_function(
                activation_function)
        layers.append(
            activation_function(layers[-1], name="{}_activation".format(name)))

    # dropout
    if dropout_rate > 0:
        layers.append(
            tf.layers.dropout(layers[-1],
                              dropout_rate,
                              name="{}_dropout".format(name),
                              training=is_training))

    # remove the input layer from the layers list
    del layers[0]

    return layers
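
Because this variant asserts that `is_training` is not a Python bool, callers are expected to drive the mode with a boolean tensor and flip it per `session.run` call. A sketch of that pattern (the placeholder names, shapes, and random batch are hypothetical; TensorFlow 1.x assumed):

import numpy as np
import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 32], name="x")
# defaults to inference mode; feed True to enable training behavior
is_training = tf.placeholder_with_default(False, shape=(), name="is_training")

out = batchnorm_activation_dropout(x, batchnorm=True,
                                   activation_function=tf.nn.tanh,
                                   dropout_rate=0.2,
                                   is_training=is_training,
                                   name="fc")[-1]

# tf.layers.batch_normalization keeps its moving-average updates in
# UPDATE_OPS, which must be run alongside each training-mode pass
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    batch = np.random.randn(4, 32).astype(np.float32)
    # training-mode pass: dropout and batch norm use training behavior
    sess.run([out] + update_ops, feed_dict={x: batch, is_training: True})
    # inference-mode pass (uses the placeholder default of False)
    sess.run(out, feed_dict={x: batch})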