Example #1
    def train(self,
              data=None,
              trgt=None,
              ifold=0,
              hidden_neurons=None,
              layer=1):
        # Replace any zero entries in hidden_neurons with one
        if hidden_neurons is None:
            hidden_neurons = [1]
        for i in range(len(hidden_neurons)):
            if hidden_neurons[i] == 0:
                hidden_neurons[i] = 1
        if (layer <= 0) or (layer > len(hidden_neurons)):
            print("[-] Error: The parameter layer must be greater than zero and less " \
                  "or equal to the length of list hidden_neurons")
            return -1

        neurons_str = self.get_neurons_str(data, hidden_neurons[:layer])

        model_str = os.path.join(
            self.model_save_path,
            self.prefix_str + "_{}_neurons".format(neurons_str))

        trgt_sparse = np_utils.to_categorical(trgt.astype(int))

        file_name = '%s_fold_%i_model.h5' % (model_str, ifold)
        if os.path.exists(file_name):
            if self.verbose:
                print('File %s exists' % file_name)
            # load model
            classifier = load_model(
                file_name,
                custom_objects={
                    '%s' % self.parameters["HyperParameters"]["loss"]:
                    self.lossFunction
                })
            model_str = os.path.join(
                self.logs_save_path,
                self.prefix_str + "_{}_neurons".format(neurons_str))

            file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
            trn_desc = joblib.load(file_name)
            return ifold, classifier, trn_desc

        train_id, test_id = self.CVO[ifold]

        best_init = 0
        best_loss = np.inf  # lowest validation loss seen across initializations

        classifier = None
        trn_desc = {}

        norm_data = self.normalize_data(data, ifold)

        for i_init in range(self.parameters["HyperParameters"]["n_inits"]):
            print(
                'Neural Network - Layer: %i - Topology: %s - Fold %i of %i Folds - Init %i of %i Inits'
                % (layer, neurons_str, ifold + 1,
                   self.parameters["HyperParameters"]["n_folds"], i_init + 1,
                   self.parameters["HyperParameters"]["n_inits"]))
            model = Sequential()

            for ilayer in range(layer):
                if ilayer == 0:  # the first hidden layer needs the input dimension
                    #if bool(self.parameters["HyperParameters"]["dropout"]):
                    #    model.add(Dropout(int(self.parameters["HyperParameters"]["dropout_parameter"])))

                    if self.parameters["HyperParameters"][
                            "regularization"] == "l1":
                        model.add(
                            Dense(units=hidden_neurons[ilayer],
                                  input_dim=data.shape[1],
                                  kernel_initializer=self.parameters[
                                      "HyperParameters"]["kernel_initializer"],
                                  kernel_regularizer=regularizers.l1(
                                      self.parameters["HyperParameters"]
                                      ["regularization_parameter"])))

                    elif self.parameters["HyperParameters"][
                            "regularization"] == "l2":
                        model.add(
                            Dense(hidden_neurons[ilayer],
                                  input_dim=data.shape[1],
                                  kernel_initializer=self.parameters[
                                      "HyperParameters"]["kernel_initializer"],
                                  kernel_regularizer=regularizers.l2(
                                      self.parameters["HyperParameters"]
                                      ["regularization_parameter"])))
                    else:
                        model.add(
                            Dense(
                                hidden_neurons[ilayer],
                                input_dim=data.shape[1],
                                kernel_initializer=self.parameters[
                                    "HyperParameters"]["kernel_initializer"]))
                else:
                    #if bool(self.parameters["HyperParameters"]["dropout"]):
                    #    model.add(Dropout(int(self.parameters["HyperParameters"]["dropout_parameter"])))

                    if self.parameters["HyperParameters"][
                            "regularization"] == "l1":
                        model.add(
                            Dense(units=hidden_neurons[ilayer],
                                  kernel_initializer=self.parameters[
                                      "HyperParameters"]["kernel_initializer"],
                                  kernel_regularizer=regularizers.l1(
                                      self.parameters["HyperParameters"]
                                      ["regularization_parameter"])))

                    elif self.parameters["HyperParameters"][
                            "regularization"] == "l2":
                        model.add(
                            Dense(hidden_neurons[ilayer],
                                  kernel_initializer=self.parameters[
                                      "HyperParameters"]["kernel_initializer"],
                                  kernel_regularizer=regularizers.l2(
                                      self.parameters["HyperParameters"]
                                      ["regularization_parameter"])))
                    else:
                        model.add(
                            Dense(
                                hidden_neurons[ilayer],
                                kernel_initializer=self.parameters[
                                    "HyperParameters"]["kernel_initializer"]))

                model.add(
                    Activation(self.parameters["HyperParameters"]
                               ["hidden_activation_function"]))

            # Add Output Layer
            model.add(
                Dense(units=trgt_sparse.shape[1],
                      kernel_initializer=self.parameters["HyperParameters"]
                      ["kernel_initializer"]))
            model.add(
                Activation(self.parameters["HyperParameters"]
                           ["classifier_output_activation_function"]))

            model.compile(
                loss=self.lossFunction,
                optimizer=self.optmizer,
                metrics=self.parameters["HyperParameters"]["metrics"])

            # Train model
            earlyStopping = callbacks.EarlyStopping(
                monitor=self.parameters["callbacks"]["EarlyStopping"]
                ["monitor"],
                patience=self.parameters["callbacks"]["EarlyStopping"]
                ["patience"],
                verbose=self.verbose,
                mode='auto')
            class_weights = getGradientWeights(trgt[train_id])
            init_trn_desc = model.fit(
                norm_data[train_id],
                trgt_sparse[train_id],
                epochs=self.parameters["HyperParameters"]["n_epochs"],
                batch_size=self.parameters["HyperParameters"]["batch_size"],
                callbacks=[earlyStopping],
                verbose=self.verbose,
                validation_data=(norm_data[test_id], trgt_sparse[test_id]),
                shuffle=True,
                class_weight=class_weights)
            if np.min(init_trn_desc.history['val_loss']) < best_loss:
                best_init = i_init
                trn_desc['best_init'] = best_init
                best_loss = np.min(init_trn_desc.history['val_loss'])
                classifier = model
                trn_desc['epochs'] = init_trn_desc.epoch

                for imetric in range(
                        len(self.parameters["HyperParameters"]["metrics"])):
                    if self.parameters["HyperParameters"]["metrics"][
                            imetric] == 'accuracy':
                        metric = 'acc'
                    else:
                        metric = self.parameters["HyperParameters"]["metrics"][
                            imetric]
                    trn_desc[metric] = init_trn_desc.history[metric]
                    trn_desc['val_' + metric] = init_trn_desc.history['val_' +
                                                                      metric]

                trn_desc['loss'] = init_trn_desc.history['loss']
                trn_desc['val_loss'] = init_trn_desc.history['val_loss']

        # save model
        file_name = '%s_fold_%i_model.h5' % (model_str, ifold)
        classifier.save(file_name)

        # save train history
        model_str = os.path.join(
            self.logs_save_path,
            self.prefix_str + "_{}_neurons".format(neurons_str))
        file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
        joblib.dump([trn_desc], file_name, compress=9)

        return ifold, classifier, trn_desc
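
For context, here is a minimal sketch of how a method like this might be driven from the outside. The wrapper class name, the config file, and the way CVO is populated are illustrative assumptions; only the train signature and the (train_id, test_id) convention of CVO come from the example above.

    # Hypothetical usage sketch (assumes numpy and scikit-learn are installed).
    import numpy as np
    from sklearn.model_selection import StratifiedKFold

    data = np.random.rand(1000, 40)        # toy input matrix
    trgt = np.random.randint(0, 4, 1000)   # toy integer class labels

    trainer = NeuralNetworkTrainer("config.json")   # hypothetical wrapper class
    n_folds = trainer.parameters["HyperParameters"]["n_folds"]
    skf = StratifiedKFold(n_splits=n_folds)
    trainer.CVO = list(skf.split(data, trgt))       # each entry is (train_id, test_id)

    for fold in range(n_folds):
        ifold, classifier, trn_desc = trainer.train(
            data=data, trgt=trgt, ifold=fold, hidden_neurons=[32, 16], layer=2)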
Example #2
    def train_classifier(self,
                         data=None,
                         trgt=None,
                         ifold=0,
                         hidden_neurons=None,
                         layer=1):
        if hidden_neurons is None:
            hidden_neurons = [1]
        for i in range(len(hidden_neurons)):
            if hidden_neurons[i] == 0:
                hidden_neurons[i] = 1

        if (layer <= 0) or (layer > len(hidden_neurons)):
            print(
                "[-] Error: The parameter layer must be greater than zero and less "
                "than or equal to the length of the list hidden_neurons")
            return -1

        # Turn trgt to one-hot encoding
        trgt_sparse = np_utils.to_categorical(trgt.astype(int))

        neurons_str = self.get_neurons_str(data, hidden_neurons[:layer])

        model_str = os.path.join(
            self.save_path,
            self.classifier_prefix_str + "_{}_neurons".format(neurons_str))

        file_name = '{}_fold_{:d}_model.h5'.format(model_str, ifold)
        if os.path.exists(file_name):
            if self.verbose:
                print('File {} exists'.format(file_name))
            # load model
            classifier = load_model(
                file_name,
                custom_objects={
                    '%s' % self.parameters["HyperParameters"]["classifier_loss"]:
                    self.classifier_lossFunction
                })
            file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
            trn_desc = joblib.load(file_name)
            return ifold, classifier, trn_desc

        train_id, test_id = self.CVO[ifold]

        norm_data = self.normalize_data(data, ifold)

        best_init = 0
        best_loss = np.inf

        classifier = None
        trn_desc = {}

        for i_init in range(self.parameters["HyperParameters"]["n_inits"]):
            print(
                'Classifier - Layer: %i - Topology: %s - Fold: %i of %i Folds - Init: %i of %i Inits'
                % (layer, neurons_str, ifold + 1,
                   self.parameters["HyperParameters"]["n_folds"], i_init + 1,
                   self.parameters["HyperParameters"]["n_inits"]))
            # Start the model
            model = Sequential()
            # Add layers
            for ilayer in range(1, layer + 1):
                # Get the weights of ilayer
                neurons_str = self.get_neurons_str(data,
                                                   hidden_neurons[:ilayer])

                previous_model_str = os.path.join(
                    self.save_path,
                    self.sae_prefix_str + "_{}_neurons".format(neurons_str))

                file_name = '%s_fold_%i_model.h5' % (previous_model_str, ifold)

                # Check if the layer was trained
                if not os.path.exists(file_name):
                    self.train_layer(data=data,
                                     trgt=data,
                                     ifold=ifold,
                                     layer=ilayer,
                                     hidden_neurons=hidden_neurons[:ilayer])

                layer_model = load_model(
                    file_name,
                    custom_objects={
                        '%s' % self.parameters["HyperParameters"]["loss"]:
                        self.lossFunction
                    })
                encoder_weights = layer_model.layers[0].get_weights()
                if ilayer == 1:
                    model.add(
                        Dense(units=hidden_neurons[0],
                              input_dim=norm_data.shape[1],
                              weights=encoder_weights,
                              trainable=self.parameters["TechniqueParameters"]
                              ["allow_change_weights"]))
                else:
                    model.add(
                        Dense(units=hidden_neurons[ilayer - 1],
                              weights=encoder_weights,
                              trainable=self.parameters["TechniqueParameters"]
                              ["allow_change_weights"]))

                model.add(
                    Activation(self.parameters["HyperParameters"]
                               ["encoder_activation_function"]))

            # Add Output Layer
            model.add(
                Dense(units=trgt_sparse.shape[1],
                      bias_initializer=self.parameters["HyperParameters"]
                      ["bias_initializer"],
                      kernel_initializer=self.parameters["HyperParameters"]
                      ["kernel_initializer"]))
            model.add(
                Activation(self.parameters["HyperParameters"]
                           ["classifier_output_activation_function"]))

            model.compile(
                loss=self.classifier_lossFunction,
                optimizer=self.optmizer,
                metrics=self.parameters["HyperParameters"]["metrics"])
            # Train model

            earlyStopping = callbacks.EarlyStopping(
                monitor=self.parameters["callbacks"]["EarlyStopping"]
                ["monitor"],
                patience=self.parameters["callbacks"]["EarlyStopping"]
                ["patience"],
                verbose=self.verbose,
                mode='auto')
            class_weights = getGradientWeights(trgt[train_id])
            init_trn_desc = model.fit(
                norm_data[train_id],
                trgt_sparse[train_id],
                epochs=int(self.parameters["HyperParameters"]
                           ["finetuning_n_epochs"]),  # fine-tuning epochs
                batch_size=self.parameters["HyperParameters"]["batch_size"],
                callbacks=[earlyStopping],
                verbose=self.verbose,
                validation_data=(norm_data[test_id], trgt_sparse[test_id]),
                shuffle=True,
                class_weight=class_weights)
            if np.min(init_trn_desc.history['val_loss']) < best_loss:
                best_init = i_init
                trn_desc['best_init'] = best_init
                best_loss = np.min(init_trn_desc.history['val_loss'])
                classifier = model
                trn_desc['epochs'] = init_trn_desc.epoch

                for imetric in range(
                        len(self.parameters["HyperParameters"]["metrics"])):
                    if self.parameters["HyperParameters"]["metrics"][
                            imetric] == 'accuracy':
                        metric = 'acc'
                    else:
                        metric = self.parameters["HyperParameters"]["metrics"][
                            imetric]
                    trn_desc[metric] = init_trn_desc.history[metric]
                    trn_desc['val_' + metric] = init_trn_desc.history['val_' +
                                                                      metric]

                trn_desc['loss'] = init_trn_desc.history['loss']
                trn_desc['val_loss'] = init_trn_desc.history['val_loss']

        # save model
        file_name = '%s_fold_%i_model.h5' % (model_str, ifold)
        classifier.save(file_name)
        file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
        joblib.dump([trn_desc], file_name, compress=9)

        return ifold, classifier, trn_desc
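
All three examples call a getGradientWeights helper that is not defined anywhere in the listing. A plausible reconstruction is sketched below, assuming it builds the per-class weight dict that Keras' class_weight argument expects, with weights inversely proportional to class frequency; the actual implementation is not part of the source.

    import numpy as np

    def getGradientWeights(y_train):
        # Assumed behavior: weight each class by min_count / class_count, so the
        # rarest class gets weight 1.0 and more frequent classes get less.
        classes, counts = np.unique(y_train.astype(int), return_counts=True)
        min_count = counts.min()
        return {int(cls): float(min_count) / float(cnt)
                for cls, cnt in zip(classes, counts)}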
Example #3
    def trainClassifier(self, data=None, trgt=None, ifold=0, hidden_neurons=None, layer=1, regularizer=None, regularizer_param=None):
        if hidden_neurons is None:
            hidden_neurons = [1]
        for i in range(len(hidden_neurons)):
            if hidden_neurons[i] == 0:
                hidden_neurons[i] = 1

        if (layer <= 0) or (layer > len(hidden_neurons)):
            print "[-] Error: The parameter layer must be greater than zero and less or equal to the length of list hidden_neurons"
            return -1

        # Turn trgt to one-hot encoding
        trgt_sparse = np_utils.to_categorical(trgt.astype(int))

        neurons_str = self.getNeuronsString(data, hidden_neurons[:layer])

        if regularizer is not None and len(regularizer) != 0:
            model_str = os.path.join(self.save_path,
                                              "classifierModel_%i_noveltyID_%s_neurons_%s_regularizer(%.3f)"%(self.inovelty, neurons_str, regularizer, regularizer_param)
                                             )

        else:
            model_str = os.path.join(self.save_path, "classifierModel_%i_noveltyID_%s_neurons" % (self.inovelty, neurons_str))

        if not self.development_flag:
            file_name = '%s_fold_%i_model.h5'%(model_str,ifold)
            if os.path.exists(file_name):
                if self.trn_params.params['verbose']:
                    print('File %s exists' % file_name)
                # load model
                classifier = load_model(file_name, custom_objects={'%s'%self.trn_params.params['loss']: self.lossFunction})
                file_name  = '%s_fold_%i_trn_desc.jbl'%(model_str,ifold)
                trn_desc   = joblib.load(file_name)
                return ifold, classifier, trn_desc
        else:
            file_name = '%s_fold_%i_model_dev.h5'%(model_str,ifold)
            if os.path.exists(file_name):
                if self.trn_params.params['verbose']:
                    print('File %s exists' % file_name)
                # load model
                classifier = load_model(file_name, custom_objects={'%s'%self.trn_params.params['loss']: self.lossFunction})
                file_name  = '%s_fold_%i_trn_desc_dev.jbl'%(model_str,ifold)
                trn_desc   = joblib.load(file_name)
                return ifold, classifier, trn_desc

        train_id, test_id = self.CVO[ifold]

        norm_data = self.normalizeData(data, ifold)

        best_init = 0
        best_loss = np.inf

        classifier = None
        trn_desc = {}

        for i_init in range(self.n_inits):
            print('Classifier - Layer: %i - Topology: %s - Fold: %i of %i Folds - Init: %i of %i Inits'
                  % (layer, neurons_str, ifold + 1, self.n_folds, i_init + 1, self.n_inits))
            # Start the model
            model = Sequential()
            # Add layers
            for ilayer in range(1, layer + 1):
                # Get the weights of ilayer
                neurons_str = self.getNeuronsString(data, hidden_neurons[:ilayer])
                if regularizer is not None and len(regularizer) != 0:
                    previous_model_str = os.path.join(self.save_path,
                                                      "saeModel_%i_noveltyID_%s_neurons_%s_regularizer(%.3f)"%(self.inovelty, neurons_str, regularizer, regularizer_param)
                                                     )

                else:
                    previous_model_str = os.path.join(self.save_path,"saeModel_%i_noveltyID_%s_neurons"%(self.inovelty, neurons_str))

                if not self.development_flag:
                    file_name = '%s_fold_%i_model.h5'%(previous_model_str,ifold)
                else:
                    file_name = '%s_fold_%i_model_dev.h5'%(previous_model_str,ifold)

                # Check if the layer was trained
                if not os.path.exists(file_name):
                    self.trainLayer(data=data,
                                    trgt=data,
                                    ifold=ifold,
                                    layer=ilayer,
                                    hidden_neurons=hidden_neurons[:ilayer],
                                    regularizer=regularizer,
                                    regularizer_param=regularizer_param)

                layer_model = load_model(file_name, custom_objects={'%s'%self.trn_params.params['loss']: self.lossFunction})
                layer_weights = layer_model.layers[0].get_weights()
                if ilayer == 1:
                    model.add(Dense(units=hidden_neurons[0], input_dim=norm_data.shape[1], weights=layer_weights,
                                    trainable=self.allow_change_weights))
                else:
                    model.add(Dense(units=hidden_neurons[ilayer-1], weights=layer_weights, trainable=self.allow_change_weights))

                model.add(Activation(self.trn_params.params['hidden_activation']))

            # Add Output Layer
            model.add(Dense(units=trgt_sparse.shape[1], kernel_initializer="uniform"))
            model.add(Activation('softmax'))

            model.compile(loss=self.lossFunction,
                          optimizer=self.optmizer,
                          metrics=self.trn_params.params['metrics'])
            # Train model
            earlyStopping = callbacks.EarlyStopping(monitor='val_loss',
                                                    patience=self.trn_params.params['patience'],
                                                    verbose=self.trn_params.params['train_verbose'],
                                                    mode='auto')
            class_weights = getGradientWeights(trgt[train_id])
            init_trn_desc = model.fit(norm_data[train_id], trgt_sparse[train_id],
                                      epochs=self.trn_params.params['n_epochs'],
                                      batch_size=self.trn_params.params['batch_size'],
                                      callbacks=[earlyStopping],
                                      verbose=self.trn_params.params['verbose'],
                                      validation_data=(norm_data[test_id], trgt_sparse[test_id]),
                                      shuffle=True,
                                      class_weight=class_weights)
            if np.min(init_trn_desc.history['val_loss']) < best_loss:
                best_init = i_init
                trn_desc['best_init'] = best_init
                best_loss = np.min(init_trn_desc.history['val_loss'])
                classifier = model
                trn_desc['epochs'] = init_trn_desc.epoch

                for imetric in range(len(self.trn_params.params['metrics'])):
                    if self.trn_params.params['metrics'][imetric] == 'accuracy':
                        metric = 'acc'
                    else:
                        metric = self.trn_params.params['metrics'][imetric]
                    trn_desc[metric] = init_trn_desc.history[metric]
                    trn_desc['val_'+metric] = init_trn_desc.history['val_'+metric]

                trn_desc['loss'] = init_trn_desc.history['loss']
                trn_desc['val_loss'] = init_trn_desc.history['val_loss']

        # save model
        if not self.development_flag:
            file_name = '%s_fold_%i_model.h5'%(model_str,ifold)
            classifier.save(file_name)
            file_name = '%s_fold_%i_trn_desc.jbl'%(model_str,ifold)
            joblib.dump([trn_desc],file_name,compress=9)
        else:
            file_name = '%s_fold_%i_model_dev.h5'%(model_str,ifold)
            classifier.save(file_name)
            file_name = '%s_fold_%i_trn_desc_dev.jbl'%(model_str,ifold)
            joblib.dump([trn_desc],file_name,compress=9)
        return ifold, classifier, trn_desc

    def train_classifier(self, data=None, trgt=None, ifold=0, hidden_neurons=None, layer=1):
        if hidden_neurons is None:
            hidden_neurons = [1]
        for i in range(len(hidden_neurons)):
            if hidden_neurons[i] == 0:
                hidden_neurons[i] = 1

        if (layer <= 0) or (layer > len(hidden_neurons)):
            print("[-] Error: The parameter layer must be greater than zero and less " +
                  "or equal to the length of list hidden_neurons")
            return -1

        # Turn trgt to one-hot encoding
        trgt_sparse = np_utils.to_categorical(trgt.astype(int))

        neurons_str = self.get_neurons_str(data, hidden_neurons[:layer])

        model_str = os.path.join(self.save_path,
                                 self.classifier_prefix_str + "_{}_neurons".format(neurons_str))

        file_name = '{}_fold_{:d}_model.h5'.format(model_str, ifold)
        if os.path.exists(file_name):
            if self.verbose:
                print('File {} exists'.format(file_name))
            # load model
            classifier = load_model(file_name, custom_objects={
                '%s' % self.parameters["HyperParameters"]["loss"]: self.lossFunction})
            file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
            trn_desc = joblib.load(file_name)
            return ifold, classifier, trn_desc

        train_id, test_id = self.CVO[ifold]

        norm_data = self.normalize_data(data, ifold)

        best_init = 0
        best_loss = np.inf

        classifier = None
        trn_desc = {}

        for i_init in range(self.parameters["HyperParameters"]["n_inits"]):
            print('Classifier - Layer: %i - Topology: %s - Fold: %i of %i Folds - Init: %i of %i Inits'
                  % (layer, neurons_str, ifold + 1,
                     self.parameters["HyperParameters"]["n_folds"],
                     i_init + 1,
                     self.parameters["HyperParameters"]["n_inits"]))
            # Start the model
            model = Sequential()
            # Add layers
            for ilayer in range(1, layer + 1):
                # Get the weights of ilayer
                neurons_str = self.get_neurons_str(data, hidden_neurons[:ilayer])

                previous_model_str = os.path.join(self.save_path,
                                                  self.sae_prefix_str + "_{}_neurons".format(neurons_str))

                file_name = '%s_fold_%i_model.h5' % (previous_model_str, ifold)

                # Check if the layer was trained
                if not os.path.exists(file_name):
                    self.train_layer(data=data,
                                     trgt=data,
                                     ifold=ifold,
                                     layer=ilayer,
                                     hidden_neurons=hidden_neurons[:ilayer])

                layer_model = load_model(file_name, custom_objects={
                    '%s' % self.parameters["HyperParameters"]["loss"]: self.lossFunction})
                encoder_weights = layer_model.layers[0].get_weights()
                if ilayer == 1:
                    model.add(Dense(units=hidden_neurons[0], input_dim=norm_data.shape[1], weights=encoder_weights,
                                    trainable=self.parameters["TechniqueParameters"]["allow_change_weights"]))
                else:
                    model.add(Dense(units=hidden_neurons[ilayer - 1], weights=encoder_weights,
                                    trainable=self.parameters["TechniqueParameters"]["allow_change_weights"]))

                model.add(Activation(self.parameters["HyperParameters"]["encoder_activation_function"]))

            # Add Output Layer
            model.add(Dense(units=trgt_sparse.shape[1],
                            bias_initializer=self.parameters["HyperParameters"]["bias_initializer"],
                            kernel_initializer=self.parameters["HyperParameters"]["kernel_initializer"]))
            model.add(Activation(self.parameters["HyperParameters"]["classifier_output_activation_function"]))

            model.compile(loss=self.lossFunction,
                          optimizer=self.optmizer,
                          metrics=self.parameters["HyperParameters"]["metrics"])
            # Train model

            earlyStopping = callbacks.EarlyStopping(monitor=self.parameters["callbacks"]["EarlyStopping"]["monitor"],
                                                    patience=self.parameters["callbacks"]["EarlyStopping"]["patience"],
                                                    verbose=self.verbose,
                                                    mode='auto')
            class_weights = getGradientWeights(trgt[train_id])
            init_trn_desc = model.fit(norm_data[train_id], trgt_sparse[train_id],
                                      epochs=int(self.parameters["HyperParameters"]["finetuning_n_epochs"]),  # fine-tuning epochs
                                      batch_size=self.parameters["HyperParameters"]["batch_size"],
                                      callbacks=[earlyStopping],
                                      verbose=self.verbose,
                                      validation_data=(norm_data[test_id], trgt_sparse[test_id]),
                                      shuffle=True,
                                      class_weight=class_weights
                                      )
            if np.min(init_trn_desc.history['val_loss']) < best_loss:
                best_init = i_init
                trn_desc['best_init'] = best_init
                best_loss = np.min(init_trn_desc.history['val_loss'])
                classifier = model
                trn_desc['epochs'] = init_trn_desc.epoch

                for imetric in range(len(self.parameters["HyperParameters"]["metrics"])):
                    if self.parameters["HyperParameters"]["metrics"][imetric] == 'accuracy':
                        metric = 'acc'
                    else:
                        metric = self.parameters["HyperParameters"]["metrics"][imetric]
                    trn_desc[metric] = init_trn_desc.history[metric]
                    trn_desc['val_' + metric] = init_trn_desc.history['val_' + metric]

                trn_desc['loss'] = init_trn_desc.history['loss']
                trn_desc['val_loss'] = init_trn_desc.history['val_loss']

        # save model
        file_name = '%s_fold_%i_model.h5' % (model_str, ifold)
        classifier.save(file_name)
        file_name = '%s_fold_%i_trn_desc.jbl' % (model_str, ifold)
        joblib.dump([trn_desc], file_name, compress=9)

        return ifold, classifier, trn_desc
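
Examples #1 and #2 read every hyperparameter from a nested parameters dict. The keys referenced throughout the code imply a configuration shaped roughly like the sketch below; the values shown are illustrative placeholders, not taken from the source.

    parameters = {
        "HyperParameters": {
            "n_folds": 10,
            "n_inits": 2,
            "n_epochs": 500,
            "finetuning_n_epochs": 100,
            "batch_size": 128,
            "loss": "mean_squared_error",
            "classifier_loss": "categorical_crossentropy",
            "metrics": ["accuracy"],
            "kernel_initializer": "uniform",
            "bias_initializer": "zeros",
            "hidden_activation_function": "tanh",
            "encoder_activation_function": "tanh",
            "classifier_output_activation_function": "softmax",
            "regularization": "none",        # "l1", "l2", or anything else for no regularizer
            "regularization_parameter": 0.001,
            "dropout": False,
            "dropout_parameter": 0.0,
        },
        "TechniqueParameters": {
            "allow_change_weights": False,   # freeze the pre-trained encoder layers
        },
        "callbacks": {
            "EarlyStopping": {"monitor": "val_loss", "patience": 25},
        },
    }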