Example #1
#Author: Michail Mamalakis
#Version: 0.1
#Licence:
#email:[email protected]

from __future__ import division, print_function
from covid_pipeline import main

# name under which the main segmentation weights are stored
mn = main.main('covid_80')

# run training of the main segmentation
mn.train_run()
# run testing of the main segmentation
# mn.test_run()
Example #2
#Author: Michail Mamalakis
#Version: 0.1
#Licence:
#email:[email protected]

from __future__ import division, print_function
from covid_pipeline import main

# name under which the main segmentation weights are stored
mn = main.main('sgd')

# run training of the main segmentation
mn.train_run()
# run testing of the main segmentation
# mn.test_run()
Example #3
#Author: Michail Mamalakis
#Version: 0.1
#Licence:
#email:[email protected]

from __future__ import division, print_function
from covid_pipeline import main

# name under which the main segmentation weights are stored
mn = main.main('covid_300')

# run training of the main segmentation
mn.train_run()
# run testing of the main segmentation
# mn.test_run()
Example #4
    def run_training(self, loss_type, model_structure, X, Y):
        # MPU: multiprocess data loading during training (fit_generator workers)
        # GPU: multi-GPU data-parallel training via multi_gpu_model
        # one-hot encode the binary mask labels
        #if self.path_case=='main':
        #	Y = utils.to_categorical(Y,(np.array(Y).shape[1]+1))
        #	Y=Y.reshape(np.array(Y).shape[0],np.array(Y).shape[2],np.array(Y).shape[3],np.array(Y).shape[4])
        #	print(np.array(Y).shape)
        if self.ram == 'GPU':
            model_structure = multi_gpu_model(model_structure, gpus=self.ngpu)
            self.batch_size = self.batch_size * self.ngpu
        # define metrics for training
        metrics_algorithm = self.metrics_generator()
        loss = self.load_loss(loss_type)
        model_structure.compile(optimizer=self.optimizer,
                                loss=loss,
                                metrics=metrics_algorithm)
        if self.normalize_image == 'True':
            X = regularization.normalize(X)
        if self.shuffle == 'True':
            # shuffle images and masks in parallel
            rng_state = np.random.get_state()
            np.random.shuffle(X)
            np.random.set_state(rng_state)
            np.random.shuffle(Y)
        # define callbacks
        self.model_json = model_structure.to_json()
        self.callbacks = self.callbacks_define(self.monitor, self.weight_name)
        # validation data
        cn = create_net.create_net(self.main_model)
        if self.validation == 'on':
            for cross_val_num in range(int(self.cross_validation_number)):
                print("cross validation run: ", cross_val_num, "/",
                      int(self.cross_validation_number))
                if cross_val_num != 0:
                    model_structure = cn.net([], [], self.path_case,
                                             self.height, self.channels,
                                             (self.classes), self.width)
                    rng_state = np.random.get_state()
                    np.random.shuffle(X)
                    np.random.set_state(rng_state)
                    np.random.shuffle(Y)
                    model_structure.compile(optimizer=self.optimizer,
                                            loss=loss,
                                            metrics=metrics_algorithm)
                    self.callbacks = self.callbacks_define(
                        self.monitor,
                        (self.weight_name + '_' + str(cross_val_num)))

                h_d = handle_data.handle_data(X, Y, self.path_case)

                (training_augment, train_steps_per_epoch,
                 validation_augment, val_steps_per_epoch) = h_d.validation_data()
                if self.ram == 'CPU':
                    history = model_structure.fit_generator(
                        training_augment,
                        epochs=self.epochs,
                        steps_per_epoch=train_steps_per_epoch,
                        verbose=1,
                        callbacks=self.callbacks,
                        validation_data=validation_augment,
                        validation_steps=val_steps_per_epoch)

                if self.ram == 'MPU':
                    history = model_structure.fit_generator(
                        training_augment,
                        epochs=self.epochs,
                        steps_per_epoch=train_steps_per_epoch,
                        verbose=1,
                        callbacks=self.callbacks,
                        validation_data=validation_augment,
                        validation_steps=val_steps_per_epoch,
                        workers=self.num_cores,
                        use_multiprocessing=True)
                # call test to evaluate results. NOTE: this part still needs modification (TODO)
                mn = main.main(self.path_case)
                print(h_d.Xval.shape, h_d.Yval.shape)
                mn.X = np.array(h_d.Xval)
                mn.Y = np.array(h_d.Yval)
                print('test output')
                print(mn.X[1, :, :, 1], mn.Y[1, :])
                filepath = str(self.weight_name + '_' + str(cross_val_num))
                file = str(self.weight_name)
                if cross_val_num != 0:
                    mn.test_run(
                        model_structure, [filepath],
                        '/fastdata/mer17mm/private/Data/',
                        ['tuberculosis', 'pneumonia', 'healthy', 'COVID-19'],
                        2)
                else:
                    mn.test_run(
                        model_structure, [file],
                        '/fastdata/mer17mm/private/Data/',
                        ['tuberculosis', 'pneumonia', 'healthy', 'COVID-19'],
                        2)
        else:
            h_d2 = handle_data.handle_data(X, Y, self.path_case)
            training_augment, train_steps_per_epoch = h_d2.no_validation_data()
            if self.ram == 'CPU':
                history = model_structure.fit_generator(
                    training_augment,
                    epochs=self.epochs,
                    steps_per_epoch=train_steps_per_epoch,
                    verbose=1,
                    callbacks=self.callbacks)
            if self.ram == 'MPU':
                history = model_structure.fit_generator(
                    training_augment,
                    epochs=self.epochs,
                    steps_per_epoch=train_steps_per_epoch,
                    verbose=1,
                    callbacks=self.callbacks,
                    workers=self.num_cores,
                    use_multiprocessing=True)
            mn = main.main(self.path_case)
            mn.Y = np.array(h_d2.Y)
            mn.X = np.array(h_d2.X)
            print(h_d2.X.shape, h_d2.Y.shape)
            # define the weight file name here; it was only set in the validation branch above
            file = str(self.weight_name)
            mn.test_run(model_structure, [file],
                        '/fastdata/mer17mm/private/Data/',
                        ['tuberculosis', 'COVID-19', 'healthy', 'pneumonia'],
                        2)

        return model_structure, history
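
Aside: the paired shuffle used in run_training (saving the NumPy RNG state, shuffling X, restoring the state, then shuffling Y) is what keeps each image aligned with its mask. A minimal, self-contained sketch of that trick with toy arrays (shapes are illustrative only, not the pipeline's real data):

import numpy as np

# toy stand-ins for images (X) and matching labels/masks (Y)
X = np.arange(10).reshape(5, 2)   # row i starts with 2*i
Y = np.arange(5)                  # label i for row i

# save the RNG state, shuffle X, restore the state, shuffle Y:
# both arrays receive the same permutation, so pairs stay aligned
rng_state = np.random.get_state()
np.random.shuffle(X)
np.random.set_state(rng_state)
np.random.shuffle(Y)

# every row of X still matches its label in Y
assert all(X[i, 0] // 2 == Y[i] for i in range(5))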
Example #5
#Author: Michail Mamalakis
#Version: 0.1
#Licence:
#email:[email protected]

from __future__ import division, print_function
from covid_pipeline import main

# name under which the main segmentation weights are stored
mn = main.main('unet')

# run training of the main segmentation
mn.train_run()
# run testing of the main segmentation
# mn.test_run()
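
Note: the short examples (#1, #2, #3, #5) are identical apart from the string passed to main.main ('covid_80', 'sgd', 'covid_300', 'unet'), which the comments describe as the name under which the segmentation weights are stored. A speculative sketch that runs the same training step for each of those configuration names in turn (assuming, as the examples suggest, that main.main accepts any such name and that train_run takes no arguments):

from __future__ import division, print_function
from covid_pipeline import main

# configuration / weight-store names taken from the examples above
for config_name in ('covid_80', 'sgd', 'covid_300', 'unet'):
    mn = main.main(config_name)
    # train the main segmentation for this configuration
    mn.train_run()
    # testing could follow, as in the commented-out calls above:
    # mn.test_run()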