Example no. 1
0
    def _wide_deep_block(self, layers):
        """Build one wide-convolution block, choosing the conv type by data rank.

        Args:
            layers: upstream Keras tensor fed into the wide convolution.

        Returns:
            Output tensor of ``self._wide_conv``.

        Raises:
            ValueError: if the dimensionality check yields neither '2D' nor '3D'.
        """
        # Evaluate the dimension check once instead of once per branch.
        dimstr = check_dims(self.input_dim_3, self.is_2D, self.is_3D)
        if dimstr == '2D':
            # '2D' data is handled with 1D convolutions.
            return self._wide_conv(Conv1D, layers)
        if dimstr == '3D':
            # '3D' data is handled with 2D convolutions.
            return self._wide_conv(Conv2D, layers)
        # Previously an unexpected result fell through and crashed later
        # with an UnboundLocalError on `res`.
        raise ValueError(
            "check_dims() must return '2D' or '3D', got: %r" % (dimstr,))
Example no. 2
0
    def __init__(self, dims):
        """Validate *dims* and, when valid, initialise a zeroed value grid."""
        self.valid = utils.check_dims(dims, "init")
        self.errors = False
        if self.valid:
            self.dimensions = dims
            self.values = self.zeros()
        else:
            # Invalid dimensions: keep no dimensions and leave values unset.
            self.dimensions = None
Example no. 3
0
    def _init_model(self):
        """Build, compile and return the residual-block Keras model.

        Returns:
            A compiled ``Model`` whose head and default loss are chosen by
            ``self.n_classes`` (2 = binary, >2 = multiclass, -1 = regression).

        Raises:
            AttributeError: if ``self.n_classes`` is not -1, 2 or above 2.
        """
        # Check the input parameters for getting data shape is '2D' or '3D'.
        self.dimstr = check_dims(self.input_dim_3, self.is_2D, self.is_3D)
        # NOTE(review): only a 2D input shape is ever built here even when
        # dimstr == '3D' -- the sibling builders branch on dimstr for the
        # Input layer; confirm whether this one should too.
        inputs = Input(shape=(self.input_dim_1, self.input_dim_2))

        # Construct the residual block chain; every block also receives the
        # raw inputs (skip connection). Starting from `inputs` makes the
        # first iteration identical to the original i == 0 special case and
        # keeps `res` defined even when n_res_layers == 0.
        res = inputs
        for _ in range(self.n_res_layers):
            res = self._res_block(res, inputs)

        # Flatten or global-average-pool the convolution result.
        if not self.use_global:
            res = Flatten()(res)
        elif self.dimstr == '2D':
            res = GlobalAveragePooling1D()(res)
        elif self.dimstr == '3D':
            res = GlobalAveragePooling2D()(res)

        # Optional dense stack: Dense -> (BatchNorm) -> ReLU -> (Dropout).
        if self.use_dense:
            for _ in range(self.n_dnn_layers):
                res = Dense(self.dnn_units)(res)
                if self.use_batch:
                    res = BatchNormalization()(res)
                res = Activation('relu')(res)
                if self.use_dropout:
                    res = Dropout(self.drop_ratio)(res)

        # Select the output layer and the task's default loss from n_classes.
        # This replaces four near-identical compile branches.
        if self.n_classes == 2:  # this is binary class problem.
            out = Dense(self.n_classes, activation='sigmoid')(res)
            default_loss = 'binary_crossentropy'
        elif self.n_classes >= 2:  # this is multiclass problem.
            out = Dense(self.n_classes, activation='softmax')(res)
            default_loss = 'categorical_crossentropy'
        elif self.n_classes == -1:  # this is regression problem.
            out = Dense(1)(res)
            default_loss = 'mse'
        else:
            raise AttributeError(
                "Parameter 'n_classes' should be -1, 2 or up 2!")

        model = Model(inputs, out)
        # Fall back to the task's default loss when none was given.
        model.compile(loss=self.loss if self.loss is not None else default_loss,
                      metrics=[self.metrics],
                      optimizer=self.optimizer)

        if not self.silence:
            print('Model structure summary:')
            model.summary()

        return model
Example no. 4
0
    def set_dimensions(self, dims):
        """Re-validate and store *dims*, rebuilding the zeroed value grid.

        Fluent: returns ``self``.
        """
        self.valid = utils.check_dims(dims, "init")
        self.dimensions = dims if self.valid else None
        if self.valid:
            # Mirror __init__: only build the zero grid for valid dimensions.
            # Previously self.zeros() ran unconditionally, even with
            # self.dimensions just set to None.
            self.values = self.zeros()

        return self
Example no. 5
0
    def _init_model(self):
        """Build, compile and return the CNN Keras model.

        Returns:
            A compiled ``Model`` whose head and default loss are chosen by
            ``self.n_classes`` (2 = binary, >2 = multiclass, -1 = regression).

        Raises:
            ValueError: if the dimension check yields neither '2D' nor '3D'.
            AttributeError: if ``self.n_classes`` is not -1, 2 or above 2.
        """
        # According to parameters, check whether data is '2D' or '3D'.
        self.dimstr = check_dims(self.input_dim_3, self.is_2D, self.is_3D)

        if self.dimstr == '2D':
            inputs = Input(shape=(self.input_dim_1, self.input_dim_2),
                           name='inputs')
        elif self.dimstr == '3D':
            inputs = Input(shape=(self.input_dim_1, self.input_dim_2,
                                  self.input_dim_3),
                           name='inputs')
        else:
            # Previously an unexpected result fell through and crashed later
            # with an UnboundLocalError on `inputs`.
            raise ValueError(
                "check_dims() must return '2D' or '3D', got: %r"
                % (self.dimstr,))

        # Stack the convolution layers; each _basic_cnn consumes the
        # previous result (starting from `inputs` reproduces the original
        # i == 0 special case).
        res = inputs
        for i in range(self.n_conv_layers):
            res = self._basic_cnn(res, i)

        # Global average pooling, or Flatten the conv result otherwise.
        if self.use_global:
            if self.dimstr == '2D':
                res = GlobalAveragePooling1D(name='global_1')(res)
            elif self.dimstr == '3D':
                res = GlobalAveragePooling2D(name='global_1')(res)
        else:
            res = Flatten()(res)

        # Optional dense stack: Dense -> (BatchNorm) -> Activation -> (Dropout).
        if self.use_dnn:
            for j in range(self.n_dnn_layers):
                # Unique, 1-based layer names: the original reused 'dense_1'
                # etc. on every iteration, which Keras rejects when
                # n_dnn_layers > 1. Names are unchanged for n_dnn_layers == 1.
                res = Dense(self.dnn_units, name='dense_%d' % (j + 1))(res)
                if self.use_batch:
                    res = BatchNormalization(
                        name='dense_batch_%d' % (j + 1))(res)
                res = Activation(self.activation)(res)
                if self.use_dropout:
                    res = Dropout(self.drop_ratio,
                                  name='dense_drop_%d' % (j + 1))(res)

        # Select the output layer and the task's default loss from n_classes.
        # This replaces four near-identical compile branches.
        if self.n_classes == 2:  # this is binary class problem.
            out = Dense(self.n_classes, activation='sigmoid')(res)
            default_loss = 'binary_crossentropy'
        elif self.n_classes >= 2:  # this is multiclass problem.
            out = Dense(self.n_classes, activation='softmax')(res)
            default_loss = 'categorical_crossentropy'
        elif self.n_classes == -1:  # this is regression problem.
            out = Dense(1)(res)
            default_loss = 'mse'
        else:
            raise AttributeError(
                "Parameter 'n_classes' should be -1, 2 or up 2!")

        model = Model(inputs, out)
        # Fall back to the task's default loss when none was given.
        model.compile(loss=self.loss if self.loss is not None else default_loss,
                      metrics=[self.metrics],
                      optimizer=self.optimizer)

        print('Model structure summary:')
        model.summary()

        return model
Example no. 6
0
    def _init_model(self):
        """Build, compile and return the Wide & Deep Keras model.

        Returns:
            A compiled ``Model`` whose head and default loss are chosen by
            ``self.n_classes`` (2 = binary, >2 = multiclass, -1 = regression).

        Raises:
            ValueError: if the dimension check yields neither '2D' nor '3D'.
            AttributeError: if ``self.n_classes`` is not -1, 2 or above 2.
        """
        # Check given parameters to judge whether data is '2D' or '3D'.
        self.dimstr = check_dims(self.input_dim_3, self.is_2D, self.is_3D)

        if self.dimstr == '2D':
            inputs = Input(shape=(self.input_dim_1, self.input_dim_2))
        elif self.dimstr == '3D':
            inputs = Input(shape=(self.input_dim_1, self.input_dim_2,
                                  self.input_dim_3))
        else:
            # Previously an unexpected result fell through and crashed later
            # with an UnboundLocalError on `inputs`.
            raise ValueError(
                "check_dims() must return '2D' or '3D', got: %r"
                % (self.dimstr,))

        # Optional leading convolution stack ('2D' data -> Conv1D,
        # '3D' data -> Conv2D).
        if self.first_conv:
            conv_cls = Conv1D if self.dimstr == '2D' else Conv2D
            res_conv = inputs
            for _ in range(self.n_first_convs):
                # Chain each conv on the previous result. The original
                # applied every conv to `inputs`, silently discarding all
                # but the last layer when n_first_convs > 1.
                res_conv = conv_cls(self.conv_units,
                                    self.kernel_size,
                                    self.strides,
                                    padding=self.padding)(res_conv)
        else:
            res_conv = inputs

        # Chain of Wide & Deep blocks (starting from `res_conv` reproduces
        # the original i == 0 special case).
        res = res_conv
        for _ in range(self.n_wide_layers):
            res = self._wide_deep_block(res)

        # Global average pooling, or Flatten the conv result otherwise.
        if self.use_global:
            if self.dimstr == '2D':
                res = GlobalAveragePooling1D()(res)
            elif self.dimstr == '3D':
                res = GlobalAveragePooling2D()(res)
        else:
            res = Flatten()(res)

        # Optional dense stack: Dense -> (BatchNorm) -> Activation -> (Dropout).
        if self.use_dnn:
            for _ in range(self.n_dnn_layers):
                res = Dense(self.dnn_units)(res)
                if self.use_batch:
                    res = BatchNormalization()(res)
                res = Activation(self.activation)(res)
                if self.use_dropout:
                    res = Dropout(self.drop_ratio)(res)

        # Select the output layer and the task's default loss from n_classes.
        # This replaces four near-identical compile branches.
        if self.n_classes == 2:  # this is binary class problem.
            out = Dense(self.n_classes, activation='sigmoid')(res)
            default_loss = 'binary_crossentropy'
        elif self.n_classes >= 2:  # this is multiclass problem.
            out = Dense(self.n_classes, activation='softmax')(res)
            default_loss = 'categorical_crossentropy'
        elif self.n_classes == -1:  # this is regression problem.
            out = Dense(1)(res)
            default_loss = 'mse'
        else:
            raise AttributeError(
                "Parameter 'n_classes' should be -1, 2 or up 2!")

        model = Model(inputs, out)
        # Fall back to the task's default loss when none was given.
        model.compile(loss=self.loss if self.loss is not None else default_loss,
                      metrics=[self.metrics],
                      optimizer=self.optimizer)

        print('Model structure summary:')
        model.summary()

        return model