Example #1
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        if self.use_residual:
            input_res = masked_layer
        mask = masked_layer[:, :, 0]

        for d in range(self.depth):

            x = self._inception_module(x, mask)

            if self.use_residual and d % 3 == 2:
                input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                               mask=mask)
                x = self._shortcut_layer(input_res, x)
                input_res = x

        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(x, mask=mask)

        output_layer = keras.layers.Dense(self.nb_filters)(gap_layer)
        output_layer = keras.layers.LeakyReLU()(output_layer)
        output_layer = keras.layers.Dense(self.nb_filters,
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(nb_classes)(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        model.compile(loss=self.loss,
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=300)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
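
Every snippet on this page passes a StepDecay instance to keras.callbacks.LearningRateScheduler, but its definition is never shown. A minimal sketch consistent with the call sites (initAlpha, factor, dropEvery) might look like the following; the exact decay rule is an assumption, not the author's confirmed implementation:

    import numpy as np

    class StepDecay:
        # Hypothetical reconstruction: drop the learning rate by `factor`
        # every `dropEvery` epochs, starting from `initAlpha`.
        def __init__(self, initAlpha=0.001, factor=0.85, dropEvery=20):
            self.initAlpha = initAlpha
            self.factor = factor
            self.dropEvery = dropEvery

        def __call__(self, epoch):
            # LearningRateScheduler calls the schedule with the epoch index.
            exponent = np.floor((1 + epoch) / self.dropEvery)
            return float(self.initAlpha * (self.factor ** exponent))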
Example #2
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000)(input_layer)
        x = masked_layer
        input_res = masked_layer
        mask = masked_layer[:, :, 0]

        for d in range(self.depth):

            x = self._inception_module(x, mask)

            if self.use_residual and d % 3 == 2:
                input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                               mask=mask)
                x = self._shortcut_layer(input_res, x)
                input_res = x

        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(x, mask=mask)

        output_layer = keras.layers.Dense(self.nb_filters)(gap_layer)
        output_layer = keras.layers.LeakyReLU()(output_layer)
        output_layer = keras.layers.Dense(nb_classes,
                                          activation='softmax')(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        loss = SparseCategoricalFocalLoss(gamma=3)
        model.compile(loss=loss,
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=['accuracy'])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_accuracy',
            save_best_only=True,
            mode='max')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=300)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
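
SparseCategoricalFocalLoss is not defined in the snippet; it matches the class of the same name from the third-party focal-loss package (pip install focal-loss). A small usage sketch, with the labels and probabilities below as made-up example data:

    import tensorflow as tf
    from focal_loss import SparseCategoricalFocalLoss

    loss = SparseCategoricalFocalLoss(gamma=3)
    y_true = tf.constant([0, 1, 2])                  # integer class labels
    y_pred = tf.constant([[0.9, 0.05, 0.05],
                          [0.1, 0.8, 0.1],
                          [0.2, 0.2, 0.6]])          # softmax probabilities
    print(float(loss(y_true, y_pred)))               # reduced scalar loss

Note that the sparse loss expects integer labels rather than one-hot vectors, which also determines how the training targets must be encoded.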
Example #3
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        mask = masked_layer[:, :, 0]

        channels = []

        for i in range(input_shape[-1]):

            channel = tf.keras.backend.expand_dims(x[..., i], axis=-1)
            input_res = tf.keras.backend.expand_dims(x[..., i], axis=-1)

            for d in range(self.depth):
                channel = self._inception_module(channel, masked_layer)

                if self.use_residual and d % 3 == 2:
                    # Identity Lambda re-attaches the mask, which the
                    # preceding layers drop.
                    input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                                   mask=mask)
                    channel = self._shortcut_layer(input_res, channel)
                    input_res = channel
            channel = keras.layers.Lambda((lambda x: x))(channel, mask=mask)
            channel = keras.layers.Conv1D(filters=1,
                                          kernel_size=1,
                                          padding='same',
                                          use_bias=False)(channel)
            channels.append(channel)

        x = keras.layers.Concatenate(axis=-1, name='concat')(channels)
        x = keras.layers.Lambda((lambda x: x))(x, mask=mask)
        x = keras.layers.Conv1D(4 * self.nb_filters,
                                self.kernel_size,
                                padding='same')(x)
        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(x, mask=mask)

        output_layer = keras.layers.Dense(self.nb_filters,
                                          name='result1')(gap_layer)
        output_layer = keras.layers.Dense(self.nb_filters,
                                          name='result2',
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(nb_classes)(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        model.compile(loss=coral.OrdinalCrossEntropy(num_classes=nb_classes),
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=150)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
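
The coral module used above corresponds to the coral-ordinal package (pip install coral-ordinal, imported as coral). CoralOrdinal emits one logit per class threshold (nb_classes - 1 of them), so predictions need a decoding step. A sketch, with model and X_test as placeholders:

    import coral_ordinal as coral
    import tensorflow as tf

    logits = model.predict(X_test)           # shape (n_samples, nb_classes - 1)
    probs = coral.ordinal_softmax(logits)    # per-class probabilities
    labels = tf.argmax(probs, axis=1)        # hard ordinal class labels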
Example #4
    def build(self, hp):
        input_layer = keras.layers.Input(self.input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        input_res = masked_layer

        for d in range(hp.Int('inception_modules', 1, 3)):

            x = self._inception_module(x, masked_layer, hp)

            if hp.Boolean('use_residual') and d % 3 == 2:
                input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                               mask=masked_layer[:, :, 0])
                x = self._shortcut_layer(input_res, x)
                input_res = x

        x = keras.layers.Dropout(hp.Float('dropout', 0.0, 0.4, step=0.1))(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(
            x, mask=masked_layer[:, :, 0])

        for i in range(hp.Int('nb_dense', 0, 2, step=1)):
            gap_layer = keras.layers.Dense(
                hp.Int(f"dense_{i}", 16, 64, step=16))(gap_layer)

        output_layer = coral.CoralOrdinal(self.num_classes)(gap_layer)
        # output_layer = keras.layers.Dense(self.num_classes, activation='softmax',
        #                                   name='result2')(gap_layer)

        model = keras.models.Model(inputs=input_layer,
                                   outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        lr = hp.Float('learning_rate', 1e-5, 1e-2,
                      sampling='log', default=1e-3)
        model.compile(loss=coral.OrdinalCrossEntropy(num_classes=self.num_classes),
                      optimizer=keras.optimizers.Adam(lr),
                      metrics=[coral.MeanAbsoluteErrorLabels()])

        # reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
        #                                               factor=0.5, patience=50,
        #                                               min_lr=0.0001)

        # file_path = self.output_directory + 'best_model.hdf5'

        # model_checkpoint = keras.callbacks.ModelCheckpoint(
        #     filepath=file_path, monitor='val_accuracy',
        #     save_best_only=True, mode='max')

        # stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
        #                                            restore_best_weights=True,
        #                                            patience=300)

        schedule = StepDecay(initAlpha=lr,
                             factor=hp.Float('lr_factor', 0.7, 1.0, step=0.1),
                             dropEvery=hp.Int('lr_dropstep', 10, 40, step=10))
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [lr_decay]

        model.summary()
        return model
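
This variant is a KerasTuner hypermodel (build(self, hp)). A sketch of how it might be driven, assuming the surrounding class subclasses keras_tuner.HyperModel and that X_train/y_train are placeholder arrays:

    import keras_tuner as kt

    tuner = kt.Hyperband(
        hypermodel,                              # instance of the class above
        objective=kt.Objective('val_mean_absolute_error_labels',
                               direction='min'),
        max_epochs=100,
        directory='tuning',
        project_name='inception_ordinal')
    tuner.search(X_train, y_train, validation_split=0.2)
    best_model = tuner.get_best_models(num_models=1)[0]

The custom Objective is needed because the coral MAE metric is not one of the names KerasTuner knows the optimization direction for.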
Example #5
    def build_model(self):
        input_layer = keras.layers.Input(batch_shape=self.input_shape)
        masked = keras.layers.Masking(mask_value=-1000)(input_layer)

        conv_2d = tf.keras.backend.expand_dims(masked, axis=-1)

        for i in range(self.depth):
            conv_2d = keras.layers.Conv2D(self.filters, (self.window, 1),
                                          padding='same')(conv_2d)
            conv_2d = keras.layers.Lambda((lambda x: x))(conv_2d,
                                                         mask=masked[:, :, 0])
            # print('after conv2d: {}'.format(conv_2d))
            conv_2d = keras.layers.BatchNormalization()(conv_2d)
            conv_2d = keras.layers.Activation(activation='relu')(conv_2d)

        conv_2d = keras.layers.Conv2D(1, (1, 1), padding='same',
                                      name='conv2d-1x1',
                                      activation='relu')(conv_2d)
        conv_2d = keras.layers.Lambda((lambda x: x),
                                      name='cam')(conv_2d,
                                                  mask=masked[:, :, 0])

        print('after 1x1 conv2d: {}'.format(conv_2d))

        feats = tf.keras.backend.squeeze(conv_2d, -1)

        feats = keras.layers.Conv1D(2 * self.filters, self.window,
                                    padding='same', name='conv-final')(feats)
        feats = keras.layers.Lambda((lambda x: x),
                                    name='lambda_final')(feats,
                                                         mask=masked[:, :, 0])
        print('after conv1d: {}'.format(feats))
        feats = keras.layers.BatchNormalization()(feats)
        feats = keras.layers.Activation(activation='relu')(feats)
        print('before gap: {}'.format(feats))
        gap_layer = keras.layers.GlobalAveragePooling1D()(feats,
                                                          mask=masked[:, :, 0])

        output_layer = keras.layers.Dense(self.filters)(gap_layer)
        output_layer = keras.layers.Dense(self.nb_classes,
                                          activation='softmax')(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)
        model.compile(loss='categorical_crossentropy',
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=['accuracy'])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.75, patience=50,
                                                      min_lr=0.0001)
        file_path = self.output_directory + 'best_model.hdf5'
        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path, monitor='val_loss',
            save_best_only=True, mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=200)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
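
A note on the pattern shared by every example on this page: Masking attaches a mask that Conv1D/Conv2D, BatchNormalization, and backend ops such as expand_dims do not carry forward, so the code re-attaches it by calling an identity Lambda (and GlobalAveragePooling1D) with an explicit mask= argument, ensuring the pooling averages only the unpadded time steps. A stripped-down sketch of the same trick; the shapes are placeholders:

    import tensorflow as tf
    from tensorflow import keras

    inp = keras.layers.Input((50, 4))                     # (time, features)
    masked = keras.layers.Masking(mask_value=-1000)(inp)  # flags padded steps
    h = tf.keras.backend.expand_dims(masked, axis=-1)     # backend op: mask lost
    h = keras.layers.Conv2D(1, (3, 1), padding='same')(h) # (batch, 50, 4, 1)
    h = keras.layers.Lambda(lambda t: t)(h, mask=masked[:, :, 0])  # re-attach
    h = tf.keras.backend.squeeze(h, -1)                   # back to (batch, 50, 4)
    gap = keras.layers.GlobalAveragePooling1D()(h, mask=masked[:, :, 0])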
Example #6
    def build_model(self, input_shape, nb_classes):
        input_layer = keras.layers.Input(input_shape)
        masked_layer = keras.layers.Masking(mask_value=-1000,
                                            name='mask')(input_layer)
        x = masked_layer
        mask = masked_layer[:, :, 0]

        channels = []

        for i in range(input_shape[-1]):

            channel = tf.keras.backend.expand_dims(x[..., i], axis=-1)
            if self.use_residual:
                input_res = tf.keras.backend.expand_dims(x[..., i], axis=-1)

            for d in range(self.depth):
                channel = self._inception_module(channel, mask)

                if self.use_residual and d % 3 == 2:
                    input_res = keras.layers.Lambda((lambda x: x))(input_res,
                                                                   mask=mask)
                    channel = self._shortcut_layer(input_res, channel)
                    input_res = channel
            channel = keras.layers.Lambda((lambda x: x))(channel, mask=mask)
            channel = keras.layers.Conv1D(filters=1,
                                          kernel_size=self.kernel_size,
                                          padding='same',
                                          use_bias=False)(channel)
            channels.append(channel)

        x = keras.layers.Concatenate(axis=-1, name='concat')(channels)
        # x = keras.layers.Lambda((lambda x: x))(x,
        #                                        mask=masked_layer[:, :, 0])
        # x = keras.layers.Conv1D(4 * self.nb_filters, self.kernel_size,
        #                         padding='same')(x)
        # x = keras.layers.Dropout(0.2)(x)
        gap_layer = keras.layers.GlobalAveragePooling1D()(x, mask=mask)

        output_layer = keras.layers.Dense(self.nb_filters)(gap_layer)
        output_layer = keras.layers.LeakyReLU()(output_layer)
        output_layer = keras.layers.Dense(nb_classes,
                                          activation='softmax')(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)

        # model.compile(loss='categorical_crossentropy', optimizer=keras.optimizers.Adam(self.lr),
        #               metrics=['accuracy'])
        # Earlier weight-matrix experiments, kept for reference; only the
        # last assignment is active.
        # U = np.array([[3, 0.0, 0.0], [-0.5, 5.0, 5.0], [-0.5, 5.0, 5.0]])
        # U = np.array([[3, 0.1, 0.1], [0, 5.0, 5.0], [0, 5.0, 5.0]])
        # U = np.array([[3, 0, 0], [-0.1, 8, 6], [0, 7, 8]])
        # U = np.array([[5, 0.0, 5], [0.1, 3.0, 0.1], [5.0, 0.0, 5.0]])
        # U = np.array([[5, 5.0, 0.0], [5, 5.0, 0.0], [0.1, 0.1, 3.0]])
        # U = np.array([[1, 0.2], [0, 1.2]])
        # U = np.array([[3, 2, 0.0], [2, 3, 0.0], [0.5, 0.5, 1.5]])
        # U = np.array([[3, -0.1, 2.0], [0.5, 2, 0.5], [2, -0.1, 3]])
        # U = np.array([[5, 5, 0.5], [5, 5, 0], [2, 2.2, 2.5]])
        # U = np.array([[1, 0, 0], [0.65, 0.7, 0.65], [0, 0, 1]])
        # U = np.array([[1, 1, 0], [1, 1, 0], [0, 0, 0.1]])
        # U = np.array([[1, 0, 0], [0.35, 0.6, 0.35], [0, 0, 1]])
        U = np.array([[0.4, 0.1, 0.1], [0, 1, 1], [0, 1, 1]])
        loss = ConfusionCrossEntropy(U)

        model.compile(loss=loss,
                      optimizer=keras.optimizers.Adam(self.lr),
                      metrics=['accuracy'])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_accuracy',
            save_best_only=True,
            mode='max')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=150)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
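
ConfusionCrossEntropy is a custom loss that is not defined on this page; from the call sites, U appears to weight prediction errors per (true, predicted) class pair. A hypothetical sketch of such a loss, assuming one-hot y_true and softmax y_pred; the author's actual definition may differ:

    import tensorflow as tf

    class ConfusionCrossEntropy(tf.keras.losses.Loss):
        def __init__(self, U, name='confusion_cross_entropy'):
            super().__init__(name=name)
            # U[i, j]: assumed weight for predicting class j when the true
            # class is i.
            self.U = tf.constant(U, dtype=tf.float32)

        def call(self, y_true, y_pred):
            y_pred = tf.clip_by_value(y_pred, 1e-7, 1.0)
            # Select the weight row of the true class via the one-hot labels.
            weights = tf.matmul(tf.cast(y_true, tf.float32), self.U)
            return -tf.reduce_sum(weights * tf.math.log(y_pred), axis=-1)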
Example #7
    def build_model(self, input_shape, nb_classes):

        print('building')
        ip = keras.layers.Input(input_shape)
        print('input', ip)
        mask = keras.layers.Masking(mask_value=-1000)(ip)
        print(mask)
        # # x = AttentionLSTM(8)(mask)
        # x = keras.layers.LSTM(8)(mask)#, mask=mask[:, :, 0])
        # print('lstm', x)
        # x = keras.layers.Attention()(x)
        # x = keras.layers.Dropout(0.8)(x)
        #
        # print(x)

        y = keras.layers.Permute((2, 1))(mask)
        y = keras.layers.Conv1D(128,
                                8,
                                padding='same',
                                kernel_initializer='he_uniform')(y)
        y = keras.layers.BatchNormalization()(y)
        y = keras.layers.Activation('relu')(y)
        y = self.squeeze_excite_block(y)

        print(y)

        y = keras.layers.Conv1D(256,
                                5,
                                padding='same',
                                kernel_initializer='he_uniform')(y)
        y = keras.layers.BatchNormalization()(y)
        y = keras.layers.Activation('relu')(y)
        y = self.squeeze_excite_block(y)

        y = keras.layers.Conv1D(128,
                                3,
                                padding='same',
                                kernel_initializer='he_uniform')(y)
        y = keras.layers.BatchNormalization()(y)
        y = keras.layers.Activation('relu')(y)

        y = keras.layers.GlobalAveragePooling1D()(y)

        # x = keras.layers.concatenate([x, y])
        x = y
        out = keras.layers.Dense(nb_classes, activation='softmax')(x)

        model = keras.models.Model(ip, out)
        model.summary()

        # add load model code here to fine-tune

        optm = keras.optimizers.Adam(self.lr)
        model.compile(optimizer=optm,
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.5,
                                                      patience=50,
                                                      min_lr=0.0001)

        file_path = self.output_directory + 'best_model.hdf5'

        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_accuracy',
            save_best_only=True,
            mode='max')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=300)

        schedule = StepDecay(initAlpha=self.lr, factor=0.75, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
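
squeeze_excite_block is referenced but not shown. The name and the surrounding Conv1D/BatchNorm/ReLU stack match the LSTM-FCN-style squeeze-and-excite block; a sketch of that standard block follows, written as a free function rather than a method, and assumed rather than confirmed as the author's implementation:

    from tensorflow import keras

    def squeeze_excite_block(x, ratio=16):
        filters = x.shape[-1]
        se = keras.layers.GlobalAveragePooling1D()(x)        # squeeze: (B, C)
        se = keras.layers.Reshape((1, filters))(se)
        se = keras.layers.Dense(filters // ratio, activation='relu',
                                kernel_initializer='he_normal',
                                use_bias=False)(se)          # bottleneck
        se = keras.layers.Dense(filters, activation='sigmoid',
                                kernel_initializer='he_normal',
                                use_bias=False)(se)          # per-channel gates
        return keras.layers.multiply([x, se])                # rescale channels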
Example #8
    def build_model(self):
        input_layer = keras.layers.Input(batch_shape=self.input_shape)
        masked = keras.layers.Masking(mask_value=-1000)(input_layer)

        conv_2d = tf.keras.backend.expand_dims(masked, axis=-1)
        if self.use_1d:
            conv_1d = masked

        for i in range(self.depth):
            if self.use_inception:
                kernel_size_s = [self.window // (2**i) for i in range(3)]
                conv_l = []
                # `ks` avoids clobbering the outer depth loop variable `i`.
                for ks in kernel_size_s:
                    layer = keras.layers.Conv2D(self.filters, (ks, 1),
                                                padding='same',
                                                use_bias=True,
                                                activation='relu')(conv_2d)
                    conv_l.append(
                        keras.layers.Lambda((lambda x: x))(
                            layer, mask=masked[:, :, 0]))

                conv_2d = keras.layers.Concatenate(axis=-1)(conv_l)
                # conv_2d_incep = keras.layers.Conv2D(self.filters, (int(self.window/2), 1),
                #                               padding='same')(conv_2d)
                # conv_2d_incep = keras.layers.Lambda((lambda x: x))(conv_2d_incep,
                #                                              mask=masked[:, :, 0])
                # conv_2d_incep = keras.layers.BatchNormalization()(conv_2d_incep)
                # conv_2d_incep = keras.layers.Activation(activation='relu')(conv_2d_incep)
            else:
                conv_2d = keras.layers.Conv2D(self.filters, (self.window, 1),
                                              padding='same')(conv_2d)
                conv_2d = keras.layers.Lambda((lambda x: x))(conv_2d,
                                                             mask=masked[:, :,
                                                                         0])
                # print('after conv2d: {}'.format(conv_2d))
                conv_2d = keras.layers.BatchNormalization()(conv_2d)
                conv_2d = keras.layers.Activation(activation='relu')(conv_2d)

                # if self.use_inception:
                #     conv_2d = keras.layers.Concatenate(axis=-1)([conv_2d, conv_2d_incep])

            if self.use_bottleneck:
                conv_2d = keras.layers.Conv2D(self.bottleneck_size, (1, 1),
                                              padding='same')(conv_2d)
                conv_2d = keras.layers.Lambda((lambda x: x))(conv_2d,
                                                             mask=masked[:, :,
                                                                         0])

            if self.use_1d:
                if self.use_inception:
                    kernel_size_s = [self.window // (2**i) for i in range(3)]
                    conv_l = []
                    for ks in kernel_size_s:
                        layer = keras.layers.Conv1D(self.filters, ks,
                                                    padding='same',
                                                    use_bias=True,
                                                    activation='relu')(conv_1d)
                        conv_l.append(
                            keras.layers.Lambda((lambda x: x))(
                                layer, mask=masked[:, :, 0]))

                    conv_1d = keras.layers.Concatenate(axis=-1)(conv_l)
                    # conv_1d_incep = keras.layers.Conv1D(self.filters, int(self.window/2),
                    #                               padding='same')(conv_1d)
                    # conv_1d_incep = keras.layers.Lambda((lambda x: x))(conv_1d_incep,
                    #                                              mask=masked[:, :, 0])
                    # # print('after conv2d: {}'.format(conv_2d))
                    # conv_1d_incep = keras.layers.BatchNormalization()(conv_1d_incep)
                    # conv_1d_incep = keras.layers.Activation(activation='relu')(conv_1d_incep)
                else:
                    conv_1d = keras.layers.Conv1D(self.filters,
                                                  self.window,
                                                  padding='same')(conv_1d)
                    conv_1d = keras.layers.Lambda(
                        (lambda x: x))(conv_1d, mask=masked[:, :, 0])
                    # print('after conv2d: {}'.format(conv_2d))
                    conv_1d = keras.layers.BatchNormalization()(conv_1d)
                    conv_1d = keras.layers.Activation(
                        activation='relu')(conv_1d)

                # if self.use_inception:
                #     conv_1d = keras.layers.Concatenate(axis=-1)([conv_1d, conv_1d_incep])

                if self.use_bottleneck:
                    conv_1d = keras.layers.Conv1D(self.bottleneck_size,
                                                  1,
                                                  padding='same')(conv_1d)
                    conv_1d = keras.layers.Lambda(
                        (lambda x: x))(conv_1d, mask=masked[:, :, 0])

        conv_2d = keras.layers.Conv2D(1, (1, 1),
                                      padding='same',
                                      activation='relu')(conv_2d)
        conv_2d = keras.layers.Lambda((lambda x: x))(conv_2d,
                                                     mask=masked[:, :, 0])

        if self.use_1d:
            conv_1d = keras.layers.Conv1D(1,
                                          1,
                                          padding='same',
                                          activation='relu')(conv_1d)
            conv_1d = keras.layers.Lambda((lambda x: x))(conv_1d,
                                                         mask=masked[:, :, 0])

        print('after 1x1 conv2d: {}'.format(conv_2d))

        feats = tf.keras.backend.squeeze(conv_2d, -1)
        if self.use_1d:
            feats = keras.layers.Concatenate(axis=2)([feats, conv_1d])

        # feats = keras.layers.Conv1D(2 * self.filters, self.window,
        #                             padding='same', name='conv-final')(feats)
        # feats = keras.layers.Lambda((lambda x: x),
        #                             name='lambda_final')(feats,
        #                                                  mask=masked[:, :, 0])
        # print('after conv1d: {}'.format(feats))
        # feats = keras.layers.BatchNormalization()(feats)
        # feats = keras.layers.Activation(activation='relu')(feats)
        print('before gap: {}'.format(feats))
        gap_layer = keras.layers.GlobalAveragePooling1D()(feats,
                                                          mask=masked[:, :, 0])

        output_layer = keras.layers.Dense(self.filters,
                                          activation='relu')(gap_layer)
        output_layer = keras.layers.Dense(self.filters,
                                          activation='relu',
                                          use_bias=False)(output_layer)
        output_layer = coral.CoralOrdinal(self.nb_classes)(output_layer)
        # output_layer = keras.layers.Dense(self.nb_classes,
        #                                   activation='softmax')(output_layer)

        model = keras.models.Model(inputs=input_layer, outputs=output_layer)
        model.compile(
            loss=coral.OrdinalCrossEntropy(num_classes=self.nb_classes),
            optimizer=keras.optimizers.Adam(self.lr),
            metrics=[coral.MeanAbsoluteErrorLabels()])

        reduce_lr = keras.callbacks.ReduceLROnPlateau(monitor='loss',
                                                      factor=0.75,
                                                      patience=50,
                                                      min_lr=0.0001)
        file_path = self.output_directory + 'best_model.hdf5'
        model_checkpoint = keras.callbacks.ModelCheckpoint(
            filepath=file_path,
            monitor='val_mean_absolute_error_labels',
            save_best_only=True,
            mode='min')

        stop_early = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                   restore_best_weights=True,
                                                   patience=200)

        schedule = StepDecay(initAlpha=self.lr, factor=0.85, dropEvery=20)
        lr_decay = keras.callbacks.LearningRateScheduler(schedule)

        self.callbacks = [reduce_lr, model_checkpoint, stop_early, lr_decay]

        return model
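
None of the examples show training; each build method stores its callbacks on self.callbacks for a later fit call. A hypothetical driver for this last variant, where builder is an instance of the class the methods belong to and the data arrays are placeholders:

    model = builder.build_model()
    model.fit(X_train, y_train,
              validation_data=(X_val, y_val),   # needed by the val_* monitors
              epochs=1000,                      # placeholder budget
              batch_size=16,
              callbacks=builder.callbacks)

Passing validation data matters here: ModelCheckpoint and EarlyStopping monitor val_* quantities and would warn and do nothing useful without it.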