Example #1
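These snippets are excerpted methods and omit their imports. A plausible common preamble for all five examples, assuming TensorFlow 2.x and the coral-ordinal package (the exact import style is an assumption, inferred from the names used below):

import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import regularizers
from tensorflow.keras.initializers import GlorotUniform
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
import coral_ordinal as coral  # pip install coral-ordinal
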
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        if self.use_residual:
            input_res = masked_layer
        mask = masked_layer[:, :, 0]

        for d in range(self.depth):

            x = self._inception_module(x, mask)

            if self.use_residual and d % 3 == 2:
                input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                               mask=mask)
                x = self._shortcut_layer(input_res, x)
                input_res = x

        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(x, mask=mask)

        output_layer = keras.layers.Dense(self.nb_filters)(gap_layer)
        output_layer = keras.layers.LeakyReLU()(output_layer)
        output_layer = keras.layers.Dense(self.nb_filters,
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(nb_classes)(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        model.compile(loss=self.loss,
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=300)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
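
StepDecay is referenced throughout these examples but never defined in the snippets. A minimal sketch of the step-based schedule the calls appear to assume: a callable mapping the epoch index to a learning rate, as keras.callbacks.LearningRateScheduler accepts. The exact decay rule is an assumption; only the constructor signature is taken from the calls above.

class StepDecay:
    """Drops the initial rate by `factor` once every `dropEvery` epochs."""

    def __init__(self, initAlpha=0.01, factor=0.25, dropEvery=10):
        self.initAlpha = initAlpha
        self.factor = factor
        self.dropEvery = dropEvery

    def __call__(self, epoch):
        # Number of completed drop periods so far (1-based epoch count assumed).
        exp = np.floor((1 + epoch) / self.dropEvery)
        return float(self.initAlpha * (self.factor ** exp))
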
Example #2
def build_model(hl, af, l2, sh, nn):
    m = Sequential()
    m.add(
        Dense(hl,
              activation=af,
              kernel_regularizer=regularizers.l2(l2),
              kernel_initializer=GlorotUniform(seed=0),
              input_shape=(sh, )))
    for _ in range(nn - 1):
        m.add(
            Dense(hl,
                  activation=af,
                  kernel_initializer=GlorotUniform(seed=0),
                  kernel_regularizer=regularizers.l2(l2)))
    m.add(coral.CoralOrdinal(3))
    m.compile(optimizer='adam',
              loss=coral.OrdinalCrossEntropy(num_classes=3),
              metrics=[coral.MeanAbsoluteErrorLabels()])
    return m
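
A quick smoke test of this builder on random data; every value below is arbitrary and only illustrates the call shape (hl = hidden width, af = activation, l2 = regularization strength, sh = number of input features, nn = number of Dense layers). OrdinalCrossEntropy expects integer ordinal labels, not one-hot vectors.

model = build_model(hl=32, af='relu', l2=1e-4, sh=10, nn=3)

X = np.random.rand(100, 10).astype('float32')
y = np.random.randint(0, 3, size=(100,))  # integer ordinal labels 0..2
model.fit(X, y, epochs=2, batch_size=16, verbose=0)
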
Example #3
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer

        channels = []

        for i in range(input_shape[-1]):

            channel = tf.keras.backend.expand_dims(x[..., i], axis=-1)
            channel_res = tf.keras.backend.expand_dims(x[..., i], axis=-1)

            for d in range(self.depth):
                channel = self._inception_module(channel, masked_layer)

                if self.use_residual and d % 3 == 2:
                    channel_res = keras.layers.Lambda(
                        (lambda x: x))(channel_res, mask=masked_layer[:, :, 0])
                    channel = self._shortcut_layer(channel_res, channel)
                    channel_res = channel
            channel = keras.layers.Lambda((lambda x: x))(
                channel, mask=masked_layer[:, :, 0])
            channel = keras.layers.Conv1D(filters=1,
                                          kernel_size=1,
                                          padding='same',
                                          use_bias=False)(channel)
            channels.append(channel)

        x = keras.layers.Concatenate(axis=-1, name='concat')(channels)
        x = keras.layers.Lambda((lambda x: x))(x, mask=masked_layer[:, :, 0])
        x = keras.layers.Conv1D(4 * self.nb_filters,
                                self.kernel_size,
                                padding='same')(x)
        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(
            x, mask=masked_layer[:, :, 0])

        output_layer = keras.layers.Dense(self.nb_filters,
                                          name='result1')(gap_layer)
        output_layer = keras.layers.Dense(self.nb_filters,
                                          name='result2',
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(nb_classes)(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        model.compile(loss=coral.OrdinalCrossEntropy(num_classes=nb_classes),
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=150)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
Example #4
    def build(self, hp):
        input_layer = keras.layers.Input(self.input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        input_res = masked_layer

        for d in range(hp.Int('inception_modules', 1, 3)):

            x = self._inception_module(x, masked_layer, hp)

            if hp.Boolean('use_residual') and d % 3 == 2:
                input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                               mask=masked_layer[:, :, 0])
                x = self._shortcut_layer(input_res, x)
                input_res = x

        x = keras.layers.Dropout(hp.Float('dropout', 0.0, 0.4, step=0.1))(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(
            x, mask=masked_layer[:, :, 0])

        for i in range(hp.Int('nb_dense', 0, 2, step=1)):
            gap_layer = keras.layers.Dense(
                hp.Int(f"dense_{i}", 16, 64, step=16))(gap_layer)

        output_layer = coral.CoralOrdinal(self.num_classes)(gap_layer)
        # output_layer = keras.layers.Dense(self.num_classes, activation='softmax',
        #                                   name='result2')(gap_layer)

        model = keras.models.Model(inputs=input_layer,
                                   outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        lr = hp.Float('learning_rate', 1e-5, 1e-2,
                      sampling='log', default=1e-3)
        model.compile(loss=coral.OrdinalCrossEntropy(num_classes=self.num_classes),
                      optimizer=keras.optimizers.Adam(lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        # reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
        #                                               actor=0.5, patience=50,
        #                                               min_lr=0.0001)

        # file_path = self.output_directory + 'best_model.hdf5'

        # model_checkpoint = keras.callbacks.ModelCheckpoint(
        #     filepath=file_path, monitor='val_accuracy',
        #     save_best_only=True, mode='max')

        # stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
        #                                            restore_best_weights=True,
        #                                            patience=300)

        schedule = StepDecay(initAlpha=lr,
                             factor=hp.Float('lr_factor', 0.7, 1.0, step=0.1),
                             dropEvery=hp.Int('lr_dropstep', 10, 40, step=10))
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [lr_decay]

        model.summary()
        return model
Example #5
    def build_model(self):
        input_layer = keras.layers.Input(batch_shape=self.input_shape)
        masked = keras.layers.Masking(mask_value=-1000)(input_layer)

        conv_2d = tf.keras.backend.expand_dims(masked, axis=-1)
        if self.use_1d:
            conv_1d = masked

        for _ in range(self.depth):
            if self.use_inception:
                kernel_size_s = [self.window // (2 ** k) for k in range(3)]
                conv_l = []
                for k in range(len(kernel_size_s)):
                    layer = keras.layers.Conv2D(self.filters,
                                                (kernel_size_s[k], 1),
                                                padding='same',
                                                use_bias=True,
                                                activation='relu')(conv_2d)
                    conv_l.append(
                        keras.layers.Lambda((lambda x: x))(
                            layer, mask=masked[:, :, 0]))

                conv_2d = keras.layers.Concatenate(axis=-1)(conv_l)
                # conv_2d_incep = keras.layers.Conv2D(self.filters, (int(self.window/2), 1),
                #                               padding='same')(conv_2d)
                # conv_2d_incep = keras.layers.Lambda((lambda x: x))(conv_2d_incep,
                #                                              mask=masked[:, :, 0])
                # conv_2d_incep = keras.layers.BatchNormalization()(conv_2d_incep)
                # conv_2d_incep = keras.layers.Activation(activation='relu')(conv_2d_incep)
            else:
                conv_2d = keras.layers.Conv2D(self.filters, (self.window, 1),
                                              padding='same')(conv_2d)
                conv_2d = keras.layers.Lambda((lambda x: x))(
                    conv_2d, mask=masked[:, :, 0])
                # print('after conv2d: {}'.format(conv_2d))
                conv_2d = keras.layers.BatchNormalization()(conv_2d)
                conv_2d = keras.layers.Activation(activation='relu')(conv_2d)

                # if self.use_inception:
                #     conv_2d = keras.layers.Concatenate(axis=-1)([conv_2d, conv_2d_incep])

            if self.use_bottleneck:
                conv_2d = keras.layers.Conv2D(self.bottleneck_size, (1, 1),
                                              padding='same')(conv_2d)
                conv_2d = keras.layers.Lambda((lambda x: x))(
                    conv_2d, mask=masked[:, :, 0])

            if self.use_1d:
                if self.use_inception:
                    kernel_size_s = [self.window // (2 ** k) for k in range(3)]
                    conv_l = []
                    for k in range(len(kernel_size_s)):
                        layer = keras.layers.Conv1D(self.filters,
                                                    kernel_size_s[k],
                                                    padding='same',
                                                    use_bias=True,
                                                    activation='relu')(conv_1d)
                        conv_l.append(
                            keras.layers.Lambda((lambda x: x))(
                                layer, mask=masked[:, :, 0]))

                    conv_1d = keras.layers.Concatenate(axis=-1)(conv_l)
                    # conv_1d_incep = keras.layers.Conv1D(self.filters, int(self.window/2),
                    #                               padding='same')(conv_1d)
                    # conv_1d_incep = keras.layers.Lambda((lambda x: x))(conv_1d_incep,
                    #                                              mask=masked[:, :, 0])
                    # # print('after conv2d: {}'.format(conv_2d))
                    # conv_1d_incep = keras.layers.BatchNormalization()(conv_1d_incep)
                    # conv_1d_incep = keras.layers.Activation(activation='relu')(conv_1d_incep)
                else:
                    conv_1d = keras.layers.Conv1D(self.filters,
                                                  self.window,
                                                  padding='same')(conv_1d)
                    conv_1d = keras.layers.Lambda(
                        (lambda x: x))(conv_1d, mask=masked[:, :, 0])
                    # print('after conv2d: {}'.format(conv_2d))
                    conv_1d = keras.layers.BatchNormalization()(conv_1d)
                    conv_1d = keras.layers.Activation(
                        activation='relu')(conv_1d)

                # if self.use_inception:
                #     conv_1d = keras.layers.Concatenate(axis=-1)([conv_1d, conv_1d_incep])

                if self.use_bottleneck:
                    conv_1d = keras.layers.Conv1D(self.bottleneck_size,
                                                  1,
                                                  padding='same')(conv_1d)
                    conv_1d = keras.layers.Lambda(
                        (lambda x: x))(conv_1d, mask=masked[:, :, 0])

        conv_2d = keras.layers.Conv2D(1, (1, 1),
                                      padding='same',
                                      activation='relu')(conv_2d)
        conv_2d = keras.layers.Lambda((lambda x: x))(conv_2d,
                                                     mask=masked[:, :, 0])

        if self.use_1d:
            conv_1d = keras.layers.Conv1D(1,
                                          1,
                                          padding='same',
                                          activation='relu')(conv_1d)
            conv_1d = keras.layers.Lambda((lambda x: x))(conv_1d,
                                                         mask=masked[:, :, 0])

        print('after 1x1 conv2d: {}'.format(conv_2d))

        feats = tf.keras.backend.squeeze(conv_2d, -1)
        if self.use_1d:
            feats = keras.layers.Concatenate(axis=2)([feats, conv_1d])

        # feats = keras.layers.Conv1D(2 * self.filters, self.window,
        #                             padding='same', name='conv-final')(feats)
        # feats = keras.layers.Lambda((lambda x: x),
        #                             name='lambda_final')(feats,
        #                                                  mask=masked[:, :, 0])
        # print('after conv1d: {}'.format(feats))
        # feats = keras.layers.BatchNormalization()(feats)
        # feats = keras.layers.Activation(activation='relu')(feats)
        print('before gap: {}'.format(feats))
        gap_layer = keras.layers.GlobalAveragePooling1D()(feats,
                                                          mask=masked[:, :, 0])

        output_layer = keras.layers.Dense(self.filters,
                                          activation='relu')(gap_layer)
        output_layer = keras.layers.Dense(self.filters,
                                          activation='relu',
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(self.nb_classes)(output_layer)
        # output_layer = keras.layers.Dense(self.nb_classes,
        #                                   activation='softmax')(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)
        model.compile(
            loss=coral.OrdinalCrossEntropy(num_classes=self.nb_classes),
            optimizer=keras.optimizers.Adam(self.lr),
            metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.75,
                                                      patience=50,
                                                      min_lr=0.0001)
        file_path = self.output_directory + 'best_model.hdf5'
        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=200)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
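
In every example the CoralOrdinal head emits one logit per threshold (nb_classes - 1 of them), not class probabilities. A sketch of turning predictions into labels with the library's ordinal_softmax helper; model and X stand in for any trained model and input batch from the examples above.

logits = model.predict(X)              # shape: (n_samples, nb_classes - 1)
probs = coral.ordinal_softmax(logits)  # cumulative logits -> class probabilities
labels = np.argmax(probs, axis=1)      # predicted ordinal class per sample
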