Example 1
def conv_bn_act(input_tensor, conv_layer, n_filters, filter_size=3, strides=1,
                activation="relu", bn=None, conv_args=None, name=""):
    conv_args = conv_args or {}  # avoid sharing a mutable default dict between calls
    # Strings are wrapped in a named Activation layer; a pre-built layer
    # instance is used as-is and keeps its own name.
    if isinstance(activation, str):
        act = Activation(activation, name=name + "_act")
    else:
        act = activation

    x = conv_layer(n_filters, filter_size, strides=strides,
                   name=name + "_conv", **conv_args)(input_tensor)
    if bn is None:
        x = act(x)
    elif bn == "pre_act":
        x = BatchNormalization(name=name+"_bn")(x)
        x = act(x)
    elif bn == "post_act":
        x = act(x)
        x = BatchNormalization(name=name+"_bn")(x)
    return x
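
A minimal usage sketch, assuming tensorflow.keras; the input shape, filter count, and block name are illustrative:

from tensorflow.keras.layers import Input, Conv2D, Activation, BatchNormalization
from tensorflow.keras.models import Model

inputs = Input(shape=(32, 32, 3))
# conv_layer is passed as a class, so the same helper works for Conv1D/2D/3D.
x = conv_bn_act(inputs, Conv2D, n_filters=16, bn="pre_act", name="block1")
model = Model(inputs, x)
model.summary()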
Example 2
def dense_bn_act(input_tensor, n_dense, activation="relu", bn=None, dense_args=None, name=""):
    """
    activation: either a string or an instance of a keras activation layer
                that can be called on a tensor
    dense_args: dictionary of extra Dense arguments, e.g. kernel_initializer,
                bias_initializer, ...
    """
    dense_args = dense_args or {}  # avoid sharing a mutable default dict between calls
    if isinstance(activation, str):
        act = Activation(activation, name=name + "_act")
    else:
        act = activation

    x = Dense(n_dense, name=name + "_dense", **dense_args)(input_tensor)
    if bn is None:
        x = act(x)
    elif bn == "pre_act":
        x = BatchNormalization(name=name+"_bn")(x)
        x = act(x)
    elif bn == "post_act":
        x = act(x)
        x = BatchNormalization(name=name+"_bn")(x)
    return x
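
A minimal usage sketch for the non-string branch, assuming tensorflow.keras; a pre-built LeakyReLU instance is passed instead of an activation name:

from tensorflow.keras.layers import Input, Dense, Activation, BatchNormalization, LeakyReLU
from tensorflow.keras.models import Model

inputs = Input(shape=(64,))
# A layer instance is forwarded as-is, so advanced activations work too.
x = dense_bn_act(inputs, 128, activation=LeakyReLU(0.1), bn="post_act", name="fc1")
model = Model(inputs, x)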
Example 3

def __call__(self, act, name, *args):
    """
    Create an activation layer.
    act  : type of activation
    name : name of the layer
    *args: additional arguments for the activation (e.g. alpha for LeakyReLU/ELU)
    """
    # Standard activations map directly onto a keras Activation layer.
    if act in ('sigmoid', 'softplus', 'softmax', 'relu',
               'selu', 'tanh', 'linear', 'swish'):
        la = Activation(act, name=name)
    # Advanced activations are separate layer classes taking extra arguments.
    elif act == 'leakyrelu':
        la = LeakyReLU(*args, name=name)
    elif act == 'elu':
        la = ELU(*args, name=name)
    else:
        raise NotImplementedError("{} is not implemented".format(act))
    return la
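
A minimal sketch of the factory in use. The enclosing class is not shown in the example above, so the wrapper name ActivationFactory here is hypothetical:

from tensorflow.keras.layers import Input, Activation, LeakyReLU, ELU

class ActivationFactory:  # hypothetical home for the __call__ method above
    def __call__(self, act, name, *args):
        if act in ('sigmoid', 'softplus', 'softmax', 'relu',
                   'selu', 'tanh', 'linear', 'swish'):
            return Activation(act, name=name)
        elif act == 'leakyrelu':
            return LeakyReLU(*args, name=name)
        elif act == 'elu':
            return ELU(*args, name=name)
        raise NotImplementedError("{} is not implemented".format(act))

make_act = ActivationFactory()
layer = make_act('leakyrelu', 'hidden_act', 0.1)  # alpha=0.1 forwarded via *args
x = layer(Input(shape=(16,)))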
Example 4
def modify_model_backprop(model, backprop_modifier):
    """Creates a copy of model by modifying all activations to use a custom op to modify the backprop behavior.

    Args:
        model:  The `keras.models.Model` instance.
        backprop_modifier: One of `{'guided', 'rectified'}`

    Returns:
        A copy of model with modified activations for backwards pass.
    """
    # The general strategy is as follows:
    # - Clone original model via save/load so that upstream callers don't see unexpected results with their models.
    # - Replace all activations in the model with ReLU.
    # - Save modified model so that it can be loaded with custom context modifying backprop behavior.
    # - Call backend specific function that registers the custom op and loads the model under modified context manager.
    # - Maintain cache to save this expensive process on subsequent calls.
    #
    # The reason for this roundabout approach is that the graph needs to be rebuilt when any of its layer builder
    # functions change. This is very complicated to do in Keras and makes the implementation very tightly bound
    # to keras internals. By saving and loading models, we don't have to worry about future compatibility.
    #
    # The only exception to this is the way advanced activations are handled which makes use of some keras internal
    # knowledge and might break in the future.

    # 0. Retrieve from cache if previously computed.
    modified_model = _MODIFIED_MODEL_CACHE.get((model, backprop_modifier))
    if modified_model is not None:
        return modified_model

    fd, model_path = tempfile.mkstemp(suffix='.h5')  # portable temp file; avoids tempfile's private API
    os.close(fd)
    try:
        # 1. Clone original model via save and load.
        model.save(model_path)
        modified_model = load_model(model_path)

        # 2. Replace all possible activations with ReLU.
        for i, layer in utils.reverse_enumerate(modified_model.layers):
            if hasattr(layer, 'activation'):
                layer.activation = tf.nn.relu
            if isinstance(layer, _ADVANCED_ACTIVATIONS):
                # NOTE: This code is brittle as it makes use of Keras internal serialization knowledge and might
                # break in the future.
                modified_layer = Activation('relu')
                modified_layer.inbound_nodes = layer.inbound_nodes
                modified_layer.name = layer.name
                modified_model.layers[i] = modified_layer

        # 3. Save model with modifications.
        modified_model.save(model_path)

        # 4. Register modifier and load modified model under custom context.
        modifier_fn = _BACKPROP_MODIFIERS.get(backprop_modifier)
        if modifier_fn is None:
            raise ValueError("'{}' modifier is not supported".format(backprop_modifier))
        modifier_fn(backprop_modifier)

        # 5. Create graph under custom context manager.
        with tf.get_default_graph().gradient_override_map({'Relu': backprop_modifier}):
            #  This should rebuild graph with modifications.
            modified_model = load_model(model_path)

            # Cache to improve subsequent call performance.
            _MODIFIED_MODEL_CACHE[(model, backprop_modifier)] = modified_model
            return modified_model
    finally:
        os.remove(model_path)
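
A minimal usage sketch. The function depends on module-level names not shown above (_MODIFIED_MODEL_CACHE, _BACKPROP_MODIFIERS, utils, load_model) and on TF1-style graph APIs (gradient_override_map), so it assumes a TensorFlow 1.x setup; the VGG16 model is only illustrative:

from keras.applications import VGG16

model = VGG16(weights=None)
guided_model = modify_model_backprop(model, 'guided')
# Gradients taken through guided_model now flow through the overridden
# 'guided' ReLU op instead of the stock ReLU gradient.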
Example 5
def get_copy_of_layer(layer, verbose=False):
    try:
        from keras.layers import Activation
        from keras import layers
    except ImportError:
        from tensorflow.keras.layers import Activation
        from tensorflow.keras import layers
    config = layer.get_config()

    # Non-standard relu6 layer (from MobileNet)
    if layer.__class__.__name__ == 'Activation':
        if config['activation'] == 'relu6':
            if get_keras_sub_version() == 1:
                from keras.applications.mobilenet import relu6
            else:
                from keras_applications.mobilenet import relu6
            layer_copy = Activation(relu6, name=layer.name)
            return layer_copy

    # DeepLabV3+ non-standard layer
    if layer.__class__.__name__ == 'BilinearUpsampling':
        from neural_nets.deeplab_v3_plus_model import BilinearUpsampling
        layer_copy = BilinearUpsampling(upsampling=config['upsampling'],
                                        output_size=config['output_size'],
                                        name=layer.name)
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'UpsampleLike':
        from keras_retinanet.layers import UpsampleLike
        layer_copy = UpsampleLike(name=layer.name)
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'Anchors':
        from keras_retinanet.layers import Anchors
        layer_copy = Anchors(name=layer.name,
                             size=config['size'],
                             stride=config['stride'],
                             ratios=config['ratios'],
                             scales=config['scales'])
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'RegressBoxes':
        from keras_retinanet.layers import RegressBoxes
        layer_copy = RegressBoxes(name=layer.name,
                                  mean=config['mean'],
                                  std=config['std'])
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'PriorProbability':
        from keras_retinanet.layers import PriorProbability
        layer_copy = PriorProbability(name=layer.name,
                                      mean=config['mean'],
                                      std=config['std'])
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'ClipBoxes':
        from keras_retinanet.layers import ClipBoxes
        layer_copy = ClipBoxes(name=layer.name)
        return layer_copy

    # RetinaNet non-standard layer
    if layer.__class__.__name__ == 'FilterDetections':
        from keras_retinanet.layers import FilterDetections
        layer_copy = FilterDetections(
            name=layer.name,
            max_detections=config['max_detections'],
            nms_threshold=config['nms_threshold'],
            score_threshold=config['score_threshold'],
            nms=config['nms'],
            class_specific_filter=config['class_specific_filter'],
            trainable=config['trainable'],
            parallel_iterations=config['parallel_iterations'])
        return layer_copy

    layer_copy = layers.deserialize({
        'class_name': layer.__class__.__name__,
        'config': config
    })
    try:
        layer_copy.name = layer.name
    except AttributeError:
        # Newer keras versions expose layer.name as a read-only property;
        # fall back to the private attribute in that case.
        layer_copy._name = layer._name
    return layer_copy
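
A minimal usage sketch, assuming tensorflow.keras is installed; it clones each layer of an illustrative model (only fresh layer objects are created, weights and graph wiring are not copied, and the get_keras_sub_version helper referenced inside is assumed to come from the same module):

from tensorflow.keras.applications import MobileNet

model = MobileNet(weights=None, input_shape=(128, 128, 3))
clones = [get_copy_of_layer(layer) for layer in model.layers]
print("cloned", len(clones), "layers")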