Example 1
 def inner(x):
     """Residual branch: two pre-activation convolutions (LN -> ReLU -> conv),
     with optional dropout between them.

     Uses ``channels``, ``strides``, ``params`` and ``drop_rate`` from the
     enclosing scope.
     """
     out = LayerNormalization()(x)
     out = Activation("relu")(out)
     out = Convolution2D(channels, 3, strides=strides, **params)(out)
     # Dropout is only inserted when a positive rate is configured.
     if drop_rate > 0:
         out = Dropout(drop_rate)(out)
     out = LayerNormalization()(out)
     out = Activation("relu")(out)
     return Convolution2D(channels, 3, **params)(out)
Example 2
    def wide_resnet_impl(input_shape, output_size):
        """Build and compile a Wide ResNet image classifier.

        Relies on closure variables from the enclosing scope: ``L`` (total
        layer count, with L = 6*n + 4 per the Wide ResNet paper), ``k``
        (width multiplier), ``drop_rate`` and ``l2_reg`` — assumed defined
        by the caller; TODO confirm.

        Parameters
        ----------
        input_shape : tuple
            Shape of the input images (passed to ``Input``).
        output_size : int
            Number of output classes for the softmax head.

        Returns
        -------
        A compiled ``Model`` (categorical cross-entropy, Adam, accuracy).
        """
        def conv(channels, strides, params=None):
            # NOTE: the default conv kwargs are built per call rather than
            # as a mutable default argument (the original used
            # ``params=dict(...)``, the classic shared-mutable-default
            # pitfall, which also froze ``l2(l2_reg)`` at definition time).
            if params is None:
                params = dict(padding="same",
                              use_bias=False,
                              kernel_regularizer=l2(l2_reg))

            def inner(x):
                # Pre-activation residual branch: LN -> ReLU -> conv, twice,
                # with optional dropout between the two convolutions.
                x = LayerNormalization()(x)
                x = Activation("relu")(x)
                x = Convolution2D(channels, 3, strides=strides, **params)(x)
                if drop_rate > 0:
                    x = Dropout(drop_rate)(x)
                x = LayerNormalization()(x)
                x = Activation("relu")(x)
                x = Convolution2D(channels, 3, **params)(x)
                return x

            return inner

        def resize(x, shape):
            """Project ``x`` with a 1x1 conv so the skip matches ``shape``."""
            if K.int_shape(x) == shape:
                return x
            # Channel axis depends on the backend's data format.
            channels = shape[3 if K.image_data_format() ==
                             "channels_last" else 1]
            # Assumes the spatial size shrinks by an integer factor
            # (index 2 is a spatial dim in both data formats) — TODO confirm.
            strides = K.int_shape(x)[2] // shape[2]
            return Convolution2D(channels,
                                 1,
                                 padding="same",
                                 use_bias=False,
                                 strides=strides)(x)

        def block(channels, k, n, strides):
            """A group of ``n`` residual units; only the first downsamples."""
            def inner(x):
                for i in range(n):
                    x2 = conv(channels * k, strides if i == 0 else 1)(x)
                    x = add([resize(x, K.int_shape(x2)), x2])
                return x

            return inner

        # According to the paper L = 6*n + 4; invert to get units per group.
        n = (L - 4) // 6

        # Stem convolution followed by three progressively wider groups.
        group0 = Convolution2D(16,
                               3,
                               padding="same",
                               use_bias=False,
                               kernel_regularizer=l2(l2_reg))
        group1 = block(16, k, n, 1)
        group2 = block(32, k, n, 2)
        group3 = block(64, k, n, 2)

        x_in = x = Input(shape=input_shape)
        x = group0(x)
        x = group1(x)
        x = group2(x)
        x = group3(x)

        # Final pre-activation, global pooling and softmax classifier head.
        x = LayerNormalization()(x)
        x = Activation("relu")(x)
        x = GlobalAveragePooling2D()(x)
        x = Dense(output_size, kernel_regularizer=l2(l2_reg))(x)
        y = Activation("softmax")(x)

        model = Model(inputs=x_in, outputs=y)
        model.compile(loss="categorical_crossentropy",
                      optimizer="adam",
                      metrics=["accuracy"])

        return model