def build_model(self, input_shape, n_classes):
        ## input layer
        input_layer = Input(input_shape)

        ## convolutional layers
        conv_layer1 = Conv2D(filters=8, kernel_size=(3, 3),
                             activation='relu')(input_layer)
        conv_layer2 = Conv2D(filters=16, kernel_size=(3, 3),
                             activation='relu')(conv_layer1)

        ## add max pooling to retain the most informative features
        pooling_layer1 = MaxPool2D(pool_size=(2, 2))(conv_layer2)

        conv_layer3 = Conv2D(filters=32, kernel_size=(3, 3),
                             activation='relu')(pooling_layer1)
        conv_layer4 = Conv2D(filters=64, kernel_size=(3, 3),
                             activation='relu')(conv_layer3)
        pooling_layer2 = MaxPool2D(pool_size=(2, 2))(conv_layer4)

        ## apply batch normalization to the convolution outputs before feeding them to the MLP head
        pooling_layer2 = BatchNormalization()(pooling_layer2)
        flatten_layer = Flatten()(pooling_layer2)

        ## create an MLP head with dense layers : 2048 -> 512 -> n_classes
        ## add dropout between the dense layers to reduce overfitting (regularization)
        dense_layer1 = Dense(units=2048, activation='relu')(flatten_layer)
        dense_layer1 = Dropout(0.4)(dense_layer1)
        dense_layer2 = Dense(units=512, activation='relu')(dense_layer1)
        dense_layer2 = Dropout(0.4)(dense_layer2)
        output_layer = Dense(units=n_classes,
                             activation='softmax')(dense_layer2)

        ## define the model with input layer and output layer
        model = Model(inputs=input_layer, outputs=output_layer)
        model.summary()

        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)

        model.compile(loss=categorical_crossentropy,
                      optimizer=Adadelta(),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = callbacks.ModelCheckpoint(filepath=file_path,
                                                     monitor='loss',
                                                     save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
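A minimal usage sketch for the method above. `Trainer` is a hypothetical name for whatever class owns build_model (the listing only shows that it exposes output_directory and a callbacks list, and relies on helpers such as chk_n_mkdir and TrainValTensorBoard); the data below is a random stand-in for a real dataset.

import numpy as np

trainer = Trainer(output_directory='./results')              # hypothetical wrapper class
model = trainer.build_model(input_shape=(64, 64, 3), n_classes=10)

x_train = np.random.rand(32, 64, 64, 3).astype('float32')    # dummy RGB batch
y_train = np.eye(10)[np.random.randint(0, 10, 32)]           # one-hot labels
model.fit(x_train, y_train, batch_size=8, epochs=2, callbacks=trainer.callbacks)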
    def build_model(self, input_shape, n_classes):
        xception_1 = self.xception(include_top=False, pooling='max')
        for layer in xception_1.layers:
            layer.trainable = False
        input_1 = xception_1.input
        output_1 = xception_1.output
        xception_2 = self.xception(include_top=False, pooling='max')
        for layer in xception_2.layers:
            layer.trainable = False
        input_2 = xception_2.input
        output_2 = xception_2.output
        xception_3 = self.xception(include_top=False, pooling='max')
        for layer in xception_3.layers:
            layer.trainable = False
        input_3 = xception_3.input
        output_3 = xception_3.output
        xception_4 = self.xception(include_top=False, pooling='max')
        for layer in xception_4.layers:
            layer.trainable = False
        input_4 = xception_4.input
        output_4 = xception_4.output

        # element-wise maximum of the four frozen branch outputs
        # (the Maximum merge has no weights, so there is nothing extra to freeze)
        merge_layer = Maximum()([output_1, output_2, output_3, output_4])
        # merge_layer = Dropout(0.25)(merge_layer)
        # dense_layer1 = Dense(units=1024, activation='relu')(merge_layer)
        dense_layer1 = Dropout(0.5)(merge_layer)
        output_layer = Dense(n_classes,
                             activation='softmax',
                             name='predictions')(dense_layer1)

        model = Model(inputs=[input_1, input_2, input_3, input_4],
                      outputs=[output_layer])
        model.summary()
        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)
        model.compile(loss=categorical_crossentropy,
                      optimizer=Adam(lr=0.01),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = keras.callbacks.ModelCheckpoint(filepath=file_path,
                                                           monitor='loss',
                                                           save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
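Because the merged model has four inputs, training and inference must pass a list of four image batches (typically the same batch, or four differently augmented views of it). A rough sketch with made-up shapes, using the model returned by build_model above:

import numpy as np

x = np.random.rand(8, 299, 299, 3).astype('float32')   # Xception's default input size
y = np.eye(5)[np.random.randint(0, 5, 8)]               # one-hot labels, 5 classes assumed
model.fit([x, x, x, x], y, epochs=1)                    # one array per frozen branch
preds = model.predict([x, x, x, x])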
Example #3
    def build_model(self, input_shape, n_classes):
        # Load the Xception convolutional base pre-trained on ImageNet
        xception_conv = Xception(weights='imagenet',
                                 include_top=False,
                                 input_shape=input_shape)

        # Freeze all layers of the convolutional base
        for layer in xception_conv.layers:
            layer.trainable = False

        # Create the model
        model = models.Sequential()

        # Add the Xception convolutional base
        model.add(xception_conv)
        # Add new layers
        model.add(Flatten())
        model.add(Dense(1024, activation='relu'))
        model.add(Dropout(0.5))
        model.add(Dense(n_classes, activation='softmax', name='predictions'))

        # summarize the assembled model
        model.summary()
        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)
        model.compile(loss=categorical_crossentropy,
                      optimizer=Adam(lr=0.01),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = callbacks.ModelCheckpoint(filepath=file_path,
                                                     monitor='loss',
                                                     save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
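The loop above freezes the entire Xception base. If only the last few layers should remain trainable (a common fine-tuning setup), a variant along these lines would do it (a sketch, not part of the source):

# keep everything frozen except the last 4 layers of the base
for layer in xception_conv.layers[:-4]:
    layer.trainable = False
for layer in xception_conv.layers[-4:]:
    layer.trainable = True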
    def build_model(self, input_shape, n_classes):
        ## input layer
        input_layer = Input(input_shape)

        ## convolutional layers
        conv_layer1 = Conv2D(filters=16, kernel_size=(3, 3), activation='relu')(input_layer)
        pooling_layer1 = MaxPool2D(pool_size=(2, 2), strides=(2, 2))(conv_layer1)

        conv_layer2 = Conv2D(filters=32, kernel_size=(3, 3), activation='relu')(pooling_layer1)

        conv_layer3 = Conv2D(filters=32, kernel_size=(3, 3), activation='relu')(conv_layer2)
        pooling_layer2 = MaxPool2D(pool_size=(2, 2), strides=(2, 2))(conv_layer3)
        dropout_layer = Dropout(0.5)(pooling_layer2)

        ## flatten the pooled feature maps before the dense classification head
        flatten_layer = Flatten()(dropout_layer)
        dense_layer = Dense(units=2048, activation='relu')(flatten_layer)
        output_layer = Dense(units=n_classes, activation='softmax')(dense_layer)

        ## define the model with input layer and output layer
        model = Model(inputs=input_layer, outputs=output_layer)
        model.summary()

        plot_model(model, to_file=self.output_directory + '/model_graph.png', show_shapes=True, show_layer_names=True)

        model.compile(loss=categorical_crossentropy, optimizer=Adam(), metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = callbacks.ModelCheckpoint(filepath=file_path, monitor='loss', save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)
        
        self.callbacks = [model_checkpoint, tb_cb]
        return model
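A quick sanity check for this head is to push a dummy batch through the returned model and confirm it yields one probability vector per sample; the shapes below are illustrative only.

import numpy as np

dummy = np.random.rand(4, 64, 64, 3).astype('float32')
probs = model.predict(dummy)
print(probs.shape)          # expected: (4, n_classes)
print(probs.sum(axis=1))    # each row sums to ~1.0 because of the softmax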
    def build_model(self, input_shape, n_classes):
        # input layer
        input_layer = Input(input_shape)
        channel_axis = -1  # channel_axis = 1 if backend.image_data_format() == 'channels_first' else -1

        # Block 1
        x = Conv3D(8, (3, 3, 3), use_bias=False,
                   name='block1_conv1')(input_layer)
        x = BatchNormalization(axis=channel_axis, name='block1_conv1_bn')(x)
        x = Activation('relu', name='block1_conv1_act')(x)
        x = Conv3D(8, (3, 3, 2), use_bias=False, name='block1_conv2')(x)
        x = BatchNormalization(axis=channel_axis, name='block1_conv2_bn')(x)
        x = Activation('relu', name='block1_conv2_act')(x)

        residual = Conv3D(16, (1, 1, 1),
                          strides=(2, 2, 1),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 2
        x = Conv3D(16, (3, 3, 1),
                   padding='same',
                   use_bias=False,
                   name='block2_conv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block2_conv1_bn')(x)

        x = MaxPooling3D((3, 3, 1),
                         strides=(2, 2, 1),
                         padding='same',
                         name='block2_pool')(x)
        x = add([x, residual])

        residual = Conv3D(32, (1, 1, 1),
                          strides=(2, 2, 1),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 3
        x = Activation('relu', name='block3_conv1_act')(x)
        x = Conv3D(32, (3, 3, 1),
                   padding='same',
                   use_bias=False,
                   name='block3_conv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block3_conv1_bn')(x)

        x = MaxPooling3D((3, 3, 1),
                         strides=(2, 2, 1),
                         padding='same',
                         name='block3_pool')(x)
        x = add([x, residual])

        # Block 4
        x = Conv3D(64, (3, 3, 1),
                   padding='same',
                   use_bias=False,
                   name='block4_conv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block4_conv1_bn')(x)
        x = Activation('relu', name='block4_conv1_act')(x)

        # Classification block
        x = GlobalAveragePooling3D(name='avg_pool')(x)
        output_layer = Dense(n_classes,
                             activation='softmax',
                             name='predictions')(x)

        # ## optional alternative: an MLP head with dense layers : 2048 -> 512 -> n_classes
        # ## with dropout between the dense layers for regularization
        # dense_layer1 = Dense(units=2048, activation='relu')(x)
        # dense_layer1 = Dropout(0.4)(dense_layer1)
        # dense_layer2 = Dense(units=512, activation='relu')(dense_layer1)
        # dense_layer2 = Dropout(0.4)(dense_layer2)
        # output_layer = Dense(units=n_classes, activation='softmax')(dense_layer2)

        # define the model with input layer and output layer
        model = Model(inputs=input_layer, outputs=output_layer)
        model.summary()

        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)

        model.compile(loss=categorical_crossentropy,
                      optimizer=Adadelta(lr=0.1),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = keras.callbacks.ModelCheckpoint(filepath=file_path,
                                                           monitor='loss',
                                                           save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
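Conv3D layers expect 5-D batches (samples, height, width, depth, channels), so the volumetric input shape must include a channel axis. A shape sketch with made-up sizes and a hypothetical `trainer` instance owning this method:

import numpy as np

model = trainer.build_model(input_shape=(64, 64, 8, 1), n_classes=4)   # single-channel volumes
volumes = np.random.rand(2, 64, 64, 8, 1).astype('float32')
print(model.predict(volumes).shape)   # (2, 4)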
    def build_model(self, input_shape, n_classes):
        # input layer
        input_layer = Input(input_shape)
        # Block 1
        x = Conv3D(64, (3, 3, 3),
                   activation='relu',
                   padding='same',
                   name='block1_conv1')(input_layer)
        x = Conv3D(64, (3, 3, 3),
                   activation='relu',
                   padding='same',
                   name='block1_conv2')(x)
        x = MaxPool3D((2, 2, 2), strides=(2, 2, 1), name='block1_pool')(x)

        # Block 2
        x = Conv3D(128, (3, 3, 3),
                   activation='relu',
                   padding='same',
                   name='block2_conv1')(x)
        x = Conv3D(128, (3, 3, 3),
                   activation='relu',
                   padding='same',
                   name='block2_conv2')(x)
        x = MaxPool3D((2, 2, 2), strides=(2, 2, 1), name='block2_pool')(x)

        # Block 3
        x = Conv3D(256, (3, 3, 2),
                   activation='relu',
                   padding='same',
                   name='block3_conv1')(x)
        x = Conv3D(256, (3, 3, 2),
                   activation='relu',
                   padding='same',
                   name='block3_conv2')(x)
        x = Conv3D(256, (3, 3, 2),
                   activation='relu',
                   padding='same',
                   name='block3_conv3')(x)
        x = Conv3D(256, (3, 3, 2),
                   activation='relu',
                   padding='same',
                   name='block3_conv4')(x)
        x = MaxPool3D((2, 2, 2), strides=(2, 2, 1), name='block3_pool')(x)

        # Block 4
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block4_conv1')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block4_conv2')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block4_conv3')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block4_conv4')(x)
        x = MaxPool3D((2, 2, 1), strides=(2, 2, 1), name='block4_pool')(x)

        # Block 5
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block5_conv1')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block5_conv2')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block5_conv3')(x)
        x = Conv3D(512, (3, 3, 1),
                   activation='relu',
                   padding='same',
                   name='block5_conv4')(x)
        x = MaxPool3D((2, 2, 1), strides=(2, 2, 1), name='block5_pool')(x)

        # Classification block
        x = Flatten(name='flatten')(x)
        x = Dense(4096, activation='relu', name='fc1')(x)
        x = Dropout(0.4)(x)
        x = Dense(4096, activation='relu', name='fc2')(x)
        x = Dropout(0.4)(x)
        output_layer = Dense(n_classes,
                             activation='softmax',
                             name='predictions')(x)

        ## define the model with input layer and output layer
        model = Model(inputs=input_layer, outputs=output_layer)
        model.summary()

        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)

        model.compile(loss=categorical_crossentropy,
                      optimizer=Adadelta(lr=0.1),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = callbacks.ModelCheckpoint(filepath=file_path,
                                                     monitor='loss',
                                                     save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
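The two 4096-unit dense layers after Flatten dominate this network's size, so it is worth checking the parameter count before committing to training. A sketch, again with a hypothetical `trainer` instance and illustrative shapes:

model = trainer.build_model(input_shape=(64, 64, 8, 3), n_classes=6)
print(f'{model.count_params():,} parameters')   # the fc1/fc2 layers account for most of these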
Example #7
    def build_model(self, input_shape, n_classes):
        # input layer
        input_layer = Input(input_shape)
        channel_axis = -1  # channel_axis = 1 if backend.image_data_format() == 'channels_first' else -1
        # Block 1
        x = Conv2D(32, (3, 3),
                   strides=(2, 2),
                   use_bias=False,
                   name='block1_conv1')(input_layer)
        x = BatchNormalization(axis=channel_axis, name='block1_conv1_bn')(x)
        x = Activation('relu', name='block1_conv1_act')(x)
        x = Conv2D(64, (3, 3), use_bias=False, name='block1_conv2')(x)
        x = BatchNormalization(axis=channel_axis, name='block1_conv2_bn')(x)
        x = Activation('relu', name='block1_conv2_act')(x)

        residual = Conv2D(128, (1, 1),
                          strides=(2, 2),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 2
        x = SeparableConv2D(128, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block2_sepconv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block2_sepconv1_bn')(x)
        x = Activation('relu', name='block2_sepconv2_act')(x)
        x = SeparableConv2D(128, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block2_sepconv2')(x)
        x = BatchNormalization(axis=channel_axis, name='block2_sepconv2_bn')(x)

        x = MaxPooling2D((3, 3),
                         strides=(2, 2),
                         padding='same',
                         name='block2_pool')(x)
        x = add([x, residual])

        residual = Conv2D(256, (1, 1),
                          strides=(2, 2),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 3
        x = Activation('relu', name='block3_sepconv1_act')(x)
        x = SeparableConv2D(256, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block3_sepconv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block3_sepconv1_bn')(x)
        x = Activation('relu', name='block3_sepconv2_act')(x)
        x = SeparableConv2D(256, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block3_sepconv2')(x)
        x = BatchNormalization(axis=channel_axis, name='block3_sepconv2_bn')(x)

        x = MaxPooling2D((3, 3),
                         strides=(2, 2),
                         padding='same',
                         name='block3_pool')(x)
        x = add([x, residual])

        residual = Conv2D(728, (1, 1),
                          strides=(2, 2),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 4
        x = Activation('relu', name='block4_sepconv1_act')(x)
        x = SeparableConv2D(728, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block4_sepconv1')(x)
        x = BatchNormalization(axis=channel_axis, name='block4_sepconv1_bn')(x)
        x = Activation('relu', name='block4_sepconv2_act')(x)
        x = SeparableConv2D(728, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block4_sepconv2')(x)
        x = BatchNormalization(axis=channel_axis, name='block4_sepconv2_bn')(x)

        x = MaxPooling2D((3, 3),
                         strides=(2, 2),
                         padding='same',
                         name='block4_pool')(x)
        x = add([x, residual])

        # Block 5-12
        for i in range(8):
            residual = x
            prefix = 'block' + str(i + 5)

            x = Activation('relu', name=prefix + '_sepconv1_act')(x)
            x = SeparableConv2D(728, (3, 3),
                                padding='same',
                                use_bias=False,
                                name=prefix + '_sepconv1')(x)
            x = BatchNormalization(axis=channel_axis,
                                   name=prefix + '_sepconv1_bn')(x)
            x = Activation('relu', name=prefix + '_sepconv2_act')(x)
            x = SeparableConv2D(728, (3, 3),
                                padding='same',
                                use_bias=False,
                                name=prefix + '_sepconv2')(x)
            x = BatchNormalization(axis=channel_axis,
                                   name=prefix + '_sepconv2_bn')(x)
            x = Activation('relu', name=prefix + '_sepconv3_act')(x)
            x = SeparableConv2D(728, (3, 3),
                                padding='same',
                                use_bias=False,
                                name=prefix + '_sepconv3')(x)
            x = BatchNormalization(axis=channel_axis,
                                   name=prefix + '_sepconv3_bn')(x)
            x = add([x, residual])

        residual = Conv2D(1024, (1, 1),
                          strides=(2, 2),
                          padding='same',
                          use_bias=False)(x)
        residual = BatchNormalization(axis=channel_axis)(residual)

        # Block 13
        x = Activation('relu', name='block13_sepconv1_act')(x)
        x = SeparableConv2D(728, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block13_sepconv1')(x)
        x = BatchNormalization(axis=channel_axis,
                               name='block13_sepconv1_bn')(x)
        x = Activation('relu', name='block13_sepconv2_act')(x)
        x = SeparableConv2D(1024, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block13_sepconv2')(x)
        x = BatchNormalization(axis=channel_axis,
                               name='block13_sepconv2_bn')(x)

        x = MaxPooling2D((3, 3),
                         strides=(2, 2),
                         padding='same',
                         name='block13_pool')(x)
        x = add([x, residual])

        # Block 14
        x = SeparableConv2D(1536, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block14_sepconv1')(x)
        x = BatchNormalization(axis=channel_axis,
                               name='block14_sepconv1_bn')(x)
        x = Activation('relu', name='block14_sepconv1_act')(x)
        x = SeparableConv2D(2048, (3, 3),
                            padding='same',
                            use_bias=False,
                            name='block14_sepconv2')(x)
        x = BatchNormalization(axis=channel_axis,
                               name='block14_sepconv2_bn')(x)
        x = Activation('relu', name='block14_sepconv2_act')(x)

        # Classification block
        x = GlobalAveragePooling2D(name='avg_pool')(x)
        output_layer = Dense(n_classes,
                             activation='softmax',
                             name='predictions')(x)

        # define the model with input layer and output layer
        model = Model(inputs=input_layer, outputs=output_layer)
        model.summary()

        plot_model(model,
                   to_file=self.output_directory + '/model_graph.png',
                   show_shapes=True,
                   show_layer_names=True)

        model.compile(loss=categorical_crossentropy,
                      optimizer=Adadelta(lr=0.1),
                      metrics=['acc'])

        # model save
        file_path = self.output_directory + '/best_model.hdf5'
        model_checkpoint = callbacks.ModelCheckpoint(filepath=file_path,
                                                     monitor='loss',
                                                     save_best_only=True)

        # Tensorboard log
        log_dir = self.output_directory + '/tf_logs'
        chk_n_mkdir(log_dir)
        tb_cb = TrainValTensorBoard(log_dir=log_dir)

        self.callbacks = [model_checkpoint, tb_cb]
        return model
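After training, the best weights written by the ModelCheckpoint callback can be restored from disk. A sketch, with output_directory standing in for the same directory the wrapper class was given:

from keras.models import load_model

output_directory = './results'
best = load_model(output_directory + '/best_model.hdf5')
# best.evaluate(x_test, y_test)   # evaluate on held-out data once it is available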