Example #1
    @classmethod
    def from_config(cls, config):
        # Use a layer-building function to initialise a new NeuralGraphOutput.
        # deepcopy guards against layer_from_config mutating the stored config.
        inner_layer_config = config.pop('inner_layer_config')
        create_inner_layer_fn = lambda: layer_from_config(
            deepcopy(inner_layer_config))

        layer = cls(create_inner_layer_fn, **config)
        return layer
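A hypothetical round trip, assuming the wrapper's get_config() stores the serialised inner layer under the 'inner_layer_config' key popped above:

# Sketch only: NeuralGraphOutput and its config keys are assumed from context.
original = NeuralGraphOutput(32)                  # e.g. conv_width = 32
config = original.get_config()                    # includes 'inner_layer_config'
restored = NeuralGraphOutput.from_config(config)  # fresh, unbuilt clone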
Example #2
    def __init__(self, inner_layer_arg, **kwargs):
        # Initialise based on one of the three initialisation methods

        # Case 1: Check if inner_layer_arg is conv_width
        if isinstance(inner_layer_arg, int):
            self.conv_width = inner_layer_arg
            # Keras2: we assume all the kwargs should be passed to the Dense layer
            # dense_layer_kwargs, kwargs = filter_func_args(layers.Dense.__init__,
            # kwargs, overrule_args=['name'])
            self.create_inner_layer_fn = lambda: layers.Dense(
                self.conv_width, **kwargs)  #dense_layer_kwargs)

        # Case 2: Check if an initialised keras layer is given
        elif isinstance(inner_layer_arg, layers.Layer):
            assert not inner_layer_arg.built, 'When initialising with a keras layer, it cannot be built.'
            _, self.conv_width = inner_layer_arg.compute_output_shape(
                (None, 1))
            # layer_from_config will mutate the config dict, therefore create a get fn
            self.create_inner_layer_fn = lambda: layer_from_config(
                dict(class_name=inner_layer_arg.__class__.__name__,
                     config=inner_layer_arg.get_config()))

        # Case 3: Check if a function is provided that returns an initialised keras layer
        elif callable(inner_layer_arg):
            example_instance = inner_layer_arg()
            assert isinstance(
                example_instance, layers.Layer
            ), 'When initialising with a function, the function has to return a keras layer'
            assert not example_instance.built, 'When initialising with a function, the returned layer cannot be built.'
            _, self.conv_width = example_instance.compute_output_shape(
                (None, 1))
            self.create_inner_layer_fn = inner_layer_arg

        else:
            raise ValueError(
                'NeuralGraphHidden has to be initialised with 1) an int conv_width, 2) a keras layer instance, or 3) a function returning a keras layer instance.'
            )

        super(NeuralGraphHidden, self).__init__()  # Keras2: all the kwargs are passed to the Dense layer only
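The three accepted argument types map to three equivalent construction styles; a minimal sketch, assuming the class and keras.layers are in scope:

from keras import layers

h1 = NeuralGraphHidden(64)                        # Case 1: conv_width as int
h2 = NeuralGraphHidden(layers.Dense(64))          # Case 2: unbuilt layer instance
h3 = NeuralGraphHidden(lambda: layers.Dense(64))  # Case 3: factory returning a layer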
Example #3

import queue

import keras
from keras.layers import Input, UpSampling2D, Dropout, Dense, Activation, Add
from keras.models import Model
# MobileNetV2 and layer_from_config are assumed to come from the project's own
# helpers: this MobileNetV2 variant accepts seperate_softmax / output_stride,
# which the stock keras_applications version does not.


def create_mobilenet_v2(input_shape=(32, 32, 3),
                        num_classes=10,
                        is_pretrained=False,
                        mobilenet_shape=(224, 224, 3),
                        num_change_strides=0,
                        seperate_softmax=True):

    # Square inputs only, and the side length must divide the MobileNet input size.
    assert input_shape[0] == input_shape[1]
    assert 224 % input_shape[0] == 0

    mobilenet = None

    if is_pretrained:

        mobilenet = MobileNetV2(input_shape=mobilenet_shape,
                                include_top=True,
                                alpha=1.0,
                                weights='imagenet',
                                seperate_softmax=seperate_softmax,
                                backend=keras.backend,
                                layers=keras.layers,
                                models=keras.models,
                                utils=keras.utils)

        return mobilenet

    else:
        mobilenet = MobileNetV2(input_shape=mobilenet_shape,
                                include_top=True,
                                weights=None,
                                classes=num_classes,
                                seperate_softmax=seperate_softmax,
                                output_stride=None,
                                backend=keras.backend,
                                layers=keras.layers,
                                models=keras.models,
                                utils=keras.utils)

        scale_factor = mobilenet_shape[0] // input_shape[0]

        # FIFO queue of up to two candidate residual tensors; Add layers consume from it.
        output_residual = queue.Queue(2)

        input_net = Input(input_shape)
        if scale_factor > 1:
            x = UpSampling2D((scale_factor, scale_factor))(input_net)
        else:
            x = input_net

        for layer in mobilenet.layers[1:-2]:

            config = layer.get_config()

            # check if stride has to be changed
            if num_change_strides > 0:
                if layer.__class__ == keras.layers.ZeroPadding2D:
                    continue
                if 'strides' in config.keys():
                    if config['strides'] == (2, 2):
                        config['strides'] = (1, 1)
                        config['padding'] = 'same'
                        num_change_strides -= 1

            next_layer = layer_from_config({
                'class_name': layer.__class__.__name__,
                'config': config
            })

            if isinstance(layer, Add):
                x = next_layer([x, output_residual.get()])
            else:
                x = next_layer(x)

            if "_project_BN" in layer.name:
                if output_residual.full():
                    output_residual.get()
                output_residual.put(x)
            if "_add" in layer.name:
                if output_residual.full():
                    output_residual.get()
                output_residual.put(x)

        x = Dropout(rate=0.2)(x)
        x = Dense(num_classes, activation=None)(x)
        x = Activation('softmax')(x)

        return Model(inputs=input_net, outputs=x, name='mobilenetv2')
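An illustrative call, under the assumptions baked into the asserts above (square input whose side divides 224):

# Hypothetical usage: CIFAR-10-sized inputs, trained from scratch, with the
# first two stride-2 convolutions relaxed to stride 1.
model = create_mobilenet_v2(input_shape=(32, 32, 3),
                            num_classes=10,
                            is_pretrained=False,
                            num_change_strides=2)
model.summary()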
Example #4
def create_inner_layer_fn():
    # Rebuild the inner layer from a fresh copy: layer_from_config mutates
    # the config dict it receives.
    return layer_from_config(deepcopy(inner_layer_config))
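The deepcopy is what makes the factory reusable, since layer_from_config mutates the dict it receives (see the comment in Example #2); a sketch:

# Each call starts from a pristine copy of the captured config, so calling
# the factory twice is safe: inner_layer_config itself is never touched.
layer_a = create_inner_layer_fn()
layer_b = create_inner_layer_fn()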