def visual_results_jsrt_only(model_name, args):
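    # Generate per-fold visual results on the JSRT positive cases: restore the
    # CNN trained for each fold and save its candidate ROIs together with the
    # predicted probabilities.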
    print "Visual results for model {} JSRT only".format(model_name)
    imgs, blobs = jsrt.load(set_name='jsrt140p')
    pred_blobs = detect.read_blobs('data/{}-jsrt140p-pred-blobs.pkl'.format(
        args.detector))
    masks = np.load('data/aam-jsrt140p-pred-masks.npy')
    rois = create_rois(imgs, masks, pred_blobs, args)
    folds = KFold(n_splits=5, shuffle=True,
                  random_state=util.FOLDS_SEED).split(imgs)
    fold_idx = 0
    for tr, te in folds:
        # Create the network first, then restore the weights saved for this fold.
        model = neural.create_network(model_name, args,
                                      (1, args.roi_size, args.roi_size))
        model.load('data/' + model.name + '.fold-{}'.format(fold_idx + 1))
        X_tr, Y_tr, X_te, Y_te = neural.create_train_test_sets(
            blobs[tr], pred_blobs[tr], rois[tr], blobs[te], pred_blobs[te],
            rois[te])

        print 'load weights {}'.format(model.name)
        model.network.load_weights('data/{}_weights.h5'.format(model.name))
        # FIX: remove this once zmuv_mean and zmuv_std are stored by the
        # Preprocessor in augment.py
        if not hasattr(model.preprocessor, 'zmuv_mean'):
            model.preprocessor.fit(X_tr, Y_tr)

        model.save('data/' + model.name)
        pred_blobs_te, probs_te = neural.predict_proba(model, pred_blobs[te],
                                                       rois[te])
        util.save_rois_with_probs(rois[te], probs_te)
        fold_idx += 1
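    # Build the classification network for one fold, restore its weights and
    # fine-tune it with the SWWAE-based unsupervised augmentation routine.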
    def unsupervised_augment(self, model, X_train, Y_train, X_test, Y_test,
                             fold=None, streams=False, nb_epoch=1,
                             pos_neg_ratio=1.0, mode=None):
        cropped_shape = (self.roi_size, self.roi_size)
        channels = (X_train.shape[1] if self.args.datasets is None
                    else X_train[0].shape[1])
        self.network = neural.create_network(model, (channels,) + cropped_shape,
                                             fold, streams)
        #self.load_cnn_weights('data/{}_fold_{}'.format(model, fold))
        self.load_cnn('data/{}_fold_{}'.format(model, fold))

        if mode in {'add-random'}:
            self.network.generator.ratio = pos_neg_ratio
            self.network.generator.mode = 'balance_dataset'

        if self.args.datasets is None:
            X_train, Y_train, X_test, Y_test = self.network.preprocess_augment(
                X_train, Y_train, X_test, Y_test,
                streams=(self.streams != 'none'),
                cropped_shape=cropped_shape, disable_perturb=False)
            '''
            X_train = (np.array(X_train), np.array(X_train))
            X_test = (np.array(X_test), np.array(X_test))
            Y_train = (np.array(Y_train), np.array(Y_train))
            Y_test = (np.array(Y_test), np.array(Y_test))
            X_train, Y_train, X_test, Y_test = preprocess_dataset(X_train, Y_train, X_test, Y_test, streams=(self.streams != 'none'), config=self.args.preprocess_dataset, mode='hdf5')
            X_train, Y_train = self._split_data_pos_neg(X_train, Y_train)
            X_test, Y_test = self._split_data_pos_neg(X_test, Y_test)
            std = np.std(X_train[0])
            '''
            #self.helper_model = ae.swwae_augment(self.network.network, X_train, Y_train, X_test, Y_test, finetune_epochs=nb_epoch, multipliers=self.multipliers, layerwise_epochs=self.lw_epochs, decoder_epochs=self.dec_epochs, lr=self.lr, model_name=self.init)
            self.helper_model = ae.swwae_augment_hdf5(
                self.network.network, self.network.generator, X_train, Y_train,
                X_test, Y_test, finetune_epochs=nb_epoch,
                multipliers=self.multipliers, layerwise_epochs=self.lw_epochs,
                decoder_epochs=self.dec_epochs, lr=self.lr,
                model_name=self.init)

        else:
            print "Preprocessor {}".format(self.network.preprocessor)
            print "Generator {}".format(self.network.generator)
            self.helper_model = ae.swwae_augment_hdf5(
                self.network.network, self.network.generator, X_train, Y_train,
                X_test, Y_test, finetune_epochs=nb_epoch,
                multipliers=self.multipliers, layerwise_epochs=self.lw_epochs,
                decoder_epochs=self.dec_epochs, lr=self.lr,
                model_name=self.init)
def model_evaluation_tr_lidc_te_jsrt(model_name, args):
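    # Train on LIDC-IDRI candidates and evaluate on the JSRT positive cases,
    # saving a single test FROC curve.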
    imgs_tr, blobs_tr = lidc.load()
    pred_blobs_tr = detect.read_blobs('data/{}-lidc-pred-blobs.pkl'.format(
        args.detector))
    masks_tr = np.load('data/aam-lidc-pred-masks.npy')
    imgs_te, blobs_te = jsrt.load(set_name='jsrt140p')
    pred_blobs_te = detect.read_blobs('data/{}-jsrt140p-pred-blobs.pkl'.format(
        args.detector))
    masks_te = np.load('data/aam-jsrt140p-pred-masks.npy')

    rois_tr = create_rois(imgs_tr, masks_tr, pred_blobs_tr, args)
    rois_te = create_rois(imgs_te, masks_te, pred_blobs_te, args)

    model = neural.create_network(model_name, args,
                                  (1, args.roi_size, args.roi_size))
    model.name += '-{}-lidc'.format(args.detector)
    froc = evaluate_model(model, blobs_tr, pred_blobs_tr, rois_tr, blobs_te,
                          pred_blobs_te, rois_te)
    froc = eval.average_froc([froc])

    legends = ['Test FROC (JSRT positives)']
    util.save_froc([froc],
                   'data/{}-{}-lidc-jsrt-froc'.format(model.name,
                                                      args.detector),
                   legends,
                   with_std=False)
def model_output(model_name, args):
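    # Save the per-fold network outputs (candidate blobs, probabilities and
    # ROIs) on the JSRT positive cases for later inspection.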
    print "Model Outputs"
    imgs, blobs = jsrt.load(set_name='jsrt140p')
    pred_blobs = detect.read_blobs('data/{}-jsrt140p-pred-blobs.pkl'.format(
        args.detector))
    masks = np.load('data/aam-jsrt140p-pred-masks.npy')
    rois = create_rois(imgs, masks, pred_blobs, args)
    folds = KFold(n_splits=5, shuffle=True,
                  random_state=util.FOLDS_SEED).split(imgs)

    fold_idx = 0
    frocs = []
    legends = ['Fold {}'.format(i + 1) for i in range(5)]

    index = np.array(range(len(imgs)))
    for tr, te in folds:
        X_tr, Y_tr, _, _ = neural.create_train_test_sets(
            blobs[tr], pred_blobs[tr], rois[tr], blobs[te], pred_blobs[te],
            rois[te])
        model = neural.create_network(model_name, args,
                                      (1, args.roi_size, args.roi_size))
        model.name = model.name + '-{}-lidc.fold-{}'.format(
            args.detector, fold_idx + 1)
        model.network.load_weights('data/{}_weights.h5'.format(model.name))
        if not hasattr(model.preprocessor, 'zmuv_mean'):
            model.preprocessor.fit(X_tr, Y_tr)

        print "Predict ..."
        pred_blobs_te, probs_te, rois_te = neural.predict_proba(
            model, pred_blobs[te], rois[te])

        print "Save ..."
        eval.save_outputs(imgs[te], blobs[te], pred_blobs_te, probs_te,
                          rois_te, index[te])
def model_selection(model_name, args):
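    # Model selection on LIDC-IDRI: cross-validate over util.NUM_VAL_FOLDS
    # folds and save per-fold and averaged validation FROC curves.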
    # Load img, blobs and masks
    imgs, blobs, paths = lidc.load(pts=True, set_name=args.ds_tr)
    if args.ds_tr != args.ds_val:
        _, blobs_val, _ = lidc.load(pts=True, set_name=args.ds_val)
    else:
        blobs_val = blobs

    pred_blobs = detect.read_blobs('data/{}-lidc-pred-blobs.pkl'.format(
        args.detector))
    masks = np.load('data/aam-lidc-pred-masks.npy')
    assert len(imgs) == len(masks) and len(pred_blobs) == len(masks)

    # Load folds
    folds = util.model_selection_folds(imgs)

    # Create rois
    rois = create_rois(imgs, masks, pred_blobs, args, real_blobs=blobs)
    rois_val = create_rois(imgs, masks, pred_blobs, args, real_blobs=blobs_val)

    #  Set up CV
    frocs = []
    legends = ['Fold {}'.format(i + 1) for i in range(util.NUM_VAL_FOLDS)]
    fold_idx = 0

    for tr, te in folds:
        # Load and setup model
        model = neural.create_network(model_name, args,
                                      (1, args.roi_size, args.roi_size))
        model.network.summary()
        model.name = model.name + '.fold-{}'.format(fold_idx + 1)
        if args.load_model:
            print "Loading model: data/{}".format(model.name)
            model.load('data/' + model.name)

        # Train/test model
        froc = evaluate_model(model, blobs[tr], pred_blobs[tr], rois[tr],
                              blobs_val[te], pred_blobs[te], rois_val[te],
                              args.load_model)
        frocs.append(froc)

        # Record model results
        current_frocs = [eval.average_froc([froc_i]) for froc_i in frocs]
        util.save_froc(current_frocs,
                       'data/{}-{}-folds-froc'.format(model.name[:-7],
                                                      args.detector),
                       legends[:len(frocs)],
                       with_std=False)
        model.save('data/' + model.name)
        fold_idx += 1

    legends = ['Val FROC (LIDC-IDRI)']
    average_froc = eval.average_froc(frocs, np.linspace(0.0, 10.0, 101))
    util.save_froc([average_froc],
                   'data/{}-{}-val-froc'.format(model.name[:-7],
                                                args.detector),
                   legends,
                   with_std=True)
def save_performance_history(model_name, args, rois, blobs, pred_blobs, folds):
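    # Re-evaluate the checkpoints saved every 2 epochs for each fold and record
    # how the validation FROC and its partial AUCs evolve with training.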
    model = neural.create_network(model_name, args,
                                  (1, args.roi_size, args.roi_size))
    model_name = model.name
    epochs = model.training_params['nb_epoch']
    frocs = []
    legends = []

    fold_idx = 0
    for tr, te in folds:
        model.load('data/' + model_name + '.fold-{}'.format(fold_idx + 1))
        frocs.append([])
        epochs_set = list(range(1, epochs + 1, 2))

        for epoch in epochs_set:
            weights_file_name = 'data/{}.weights.{:02d}.hdf5'.format(
                model.name, epoch)
            model.network.load_weights(weights_file_name)
            pred_blobs_te, probs_te = neural.predict_proba(
                model, pred_blobs[te], rois[te])
            frocs[fold_idx].append(
                eval.froc(blobs[te], pred_blobs_te, probs_te))
        fold_idx += 1

    frocs = np.array(frocs)
    froc_history = []
    aucs_history = []
    legends = []

    i = 0
    print "check -> frocs.shape {}".format(frocs.shape)
    for epoch in range(1, epochs + 1, 2):
        frocs_by_epoch = frocs[:, i]
        froc_history.append(
            eval.average_froc(np.array(frocs_by_epoch),
                              np.linspace(0.0, 10.0, 101)))
        aucs_history.append([])
        aucs_history[-1].append(
            util.auc(froc_history[-1], np.linspace(0.2, 4.0, 101))**2)
        aucs_history[-1].append(
            util.auc(froc_history[-1], np.linspace(0.0, 5.0, 101))**2)
        aucs_history[-1].append(
            util.auc(froc_history[-1], np.linspace(0.0, 10.0, 101))**2)
        legends.append('Val FROC (LIDC-IDRI), epoch {}'.format(epoch))
        i += 1

    util.save_froc(froc_history,
                   'data/{}-val-froc-by-epoch'.format(model_name),
                   legends,
                   with_std=False)
    util.save_aucs(list(range(1, epochs + 1, 2)), aucs_history,
                   'data/{}-val-aucs'.format(model_name),
                   ['AUC between 2-4', 'AUC between 0-5', 'AUC between 0-10'])
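    # Build train/test sets from precomputed features, optionally initialize
    # the network from pretrained weights, and fit it with periodic checkpoints.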
    def train_with_feature_set_keras(self, feats_tr, pred_blobs_tr,
                                     real_blobs_tr, feats_test=None,
                                     pred_blobs_test=None, real_blobs_test=None,
                                     model='shallow_1', model_suffix=None,
                                     network_init=None):
        print("{} {} {} {} {} {}".format(
            len(feats_tr), len(pred_blobs_tr), len(real_blobs_tr),
            len(feats_test), len(pred_blobs_test), len(real_blobs_test)))
        X_train, Y_train, X_test, Y_test = neural.create_train_test_sets(
            feats_tr, pred_blobs_tr, real_blobs_tr,
            feats_test, pred_blobs_test, real_blobs_test, streams=self.streams)

        print "X_train shape {}".format(X_train.shape)
        self.network = neural.create_network(
            model, (X_train.shape[1], self.roi_size, self.roi_size),
            self.streams)
        if network_init is not None:
            if self.args.transfer:
                self.load_cnn_weights('data/{}_{}'.format(network_init,
                                                          model_suffix))
            else:
                self.load_cnn_weights(network_init)

        name = 'data/{}_{}'.format(model, model_suffix)
        history = self.network.fit(X_train, Y_train, X_test, Y_test,
                                   streams=(self.streams != 'none'),
                                   cropped_shape=(self.roi_size, self.roi_size),
                                   checkpoint_prefix=name,
                                   checkpoint_interval=2)
        return history
def classify(image, args):
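    # Single-image pipeline: downsample, detect candidate blobs ('sbf' detector
    # with 'aam' masks), score them with the LIDC-trained CNN and keep the
    # args.fppi highest-probability candidates.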
    image = preprocess.antialiasing_dowsample(image, downsample=True)
    image = np.array([image])
    blobs, probs, mask = detect.detect_func(image[0], 'sbf', 'aam', 0.5)
    rois = create_rois([image], mask, [blobs], args)
    model = neural.create_network(args.model, args,
                                  (1, args.roi_size, args.roi_size))
    model.name += '-lidc'
    model.load('data/' + model.name)
    blobs, probs = neural.predict_proba(model, [blobs], rois)
    blobs, probs = blobs[0], probs[0]
    entries = [[blobs[i], probs[i]] for i in range(len(blobs))]
    entries = sorted(entries, key=itemgetter(1), reverse=True)
    top_blobs = []
    top_probs = []
    for i in range(args.fppi):
        top_blobs.append(entries[i][0])
        top_probs.append([entries[i][1]])

    util.imwrite_with_blobs('data/classified', image[0], top_blobs)
    return blobs, probs, mask
def model_evaluation_jsrt_only(model_name, args):
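    # Evaluation protocol 2: 5-fold cross-validation on the JSRT positive
    # cases; saves per-fold and averaged test FROC curves.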
    print "Model Evaluation Protocol 2"
    imgs, blobs = jsrt.load(set_name='jsrt140p')
    pred_blobs = detect.read_blobs('data/{}-jsrt140p-pred-blobs.pkl'.format(
        args.detector))
    masks = np.load('data/aam-jsrt140p-pred-masks.npy')
    rois = create_rois(imgs, masks, pred_blobs, args)
    folds = KFold(n_splits=5, shuffle=True,
                  random_state=util.FOLDS_SEED).split(imgs)

    fold_idx = 0
    frocs = []
    legends = ['Fold {}'.format(i + 1) for i in range(5)]
    for tr, te in folds:
        model = neural.create_network(model_name, args,
                                      (1, args.roi_size, args.roi_size))
        model.name = model.name + '-{}-lidc.fold-{}'.format(
            args.detector, fold_idx + 1)
        froc = evaluate_model(model, blobs[tr], pred_blobs[tr], rois[tr],
                              blobs[te], pred_blobs[te], rois[te])
        frocs.append(froc)

        current_frocs = [eval.average_froc([froc_i]) for froc_i in frocs]
        util.save_froc(current_frocs,
                       'data/{}-{}-only-jsrt-folds'.format(
                           model.name[:-7], args.detector),
                       legends[:len(frocs)],
                       with_std=False)
        model.save('data/' + model.name)
        fold_idx += 1

    froc = eval.average_froc(frocs)
    legends = ['Test FROC (JSRT positives)']
    util.save_froc([froc],
                   'data/{}-{}-only-jsrt'.format(model.name[:-7],
                                                 args.detector),
                   legends,
                   with_std=True)
def model_selection_unsup(model_name, args):
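    # Model selection on LIDC-IDRI for the unsupervised setup (same protocol as
    # model_selection, without a separate validation blob set).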
    imgs, blobs, paths = lidc.load(pts=True)
    pred_blobs = detect.read_blobs('data/{}-lidc-pred-blobs.pkl'.format(
        args.detector))
    masks = np.load('data/aam-lidc-pred-masks.npy')

    assert len(imgs) == len(masks) and len(pred_blobs) == len(masks)

    folds = util.model_selection_folds(imgs)
    rois = create_rois(imgs, masks, pred_blobs, args, real_blobs=blobs)

    frocs = []
    legends = ['Fold {}'.format(i + 1) for i in range(util.NUM_VAL_FOLDS)]

    fold_idx = 0
    for tr, te in folds:
        model = neural.create_network(model_name, args,
                                      (1, args.roi_size, args.roi_size))
        model.name = model.name + '.fold-{}'.format(fold_idx + 1)
        froc = evaluate_model(model, blobs[tr], pred_blobs[tr], rois[tr],
                              blobs[te], pred_blobs[te], rois[te])
        frocs.append(froc)

        current_frocs = [eval.average_froc([froc_i]) for froc_i in frocs]
        util.save_froc(current_frocs,
                       'data/{}-{}-folds-froc'.format(model_name,
                                                      args.detector),
                       legends[:len(frocs)],
                       with_std=False)
        model.save('data/' + model.name)
        fold_idx += 1

    legends = ['Val FROC (LIDC-IDRI)']
    average_froc = eval.average_froc(frocs, np.linspace(0.0, 10.0, 101))
    util.save_froc([average_froc],
                   'data/{}-{}-val-froc'.format(model_name, args.detector),
                   legends,
                   with_std=True)
def transfer_with_imagenet(network_name):
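    # Pretrain the network on ImageNet (ILSVRC2012) crops resized to 32x32
    # grayscale, then save the weights for later transfer.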
    network = neural.create_network(network_name, (1, 1))

    # train
    train_datagen = ImageDataGenerator(rescale=1. / 255, horizontal_flip=True)

    train_generator = train_datagen.flow_from_directory(
        '../dbs/ILSVRC2012_img_train',
        target_size=(32, 32),
        color_mode='grayscale',
        class_mode='categorical',
        batch_size=32,
        shuffle=True)

    optimizer = SGD(lr=0.01)
    network.network.compile(loss='categorical_crossentropy',
                            optimizer=optimizer)

    network.network.fit_generator(train_generator,
                                  samples_per_epoch=3200000,
                                  nb_epoch=2)

    network.save(network_name)
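    # Jointly train the classifier and the SWWAE auxiliary decoder on the
    # augmented ROIs.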
    def joint_training(self, model, X_train, Y_train, X_test, Y_test, fold=None):
        cropped_shape = (self.roi_size, self.roi_size)
        self.network = neural.create_network(
            model, (X_train.shape[1],) + cropped_shape, fold, self.streams)

        X_train, Y_train, X_test, Y_test = self.network.preprocess_augment(
            X_train, Y_train, X_test, Y_test,
            streams=(self.streams != 'none'), cropped_shape=cropped_shape)
        self.helper_model = ae.swwae_train(
            self.network.network, X_train, Y_train, X_test, Y_test,
            finetune_epochs=self.all_epochs, multipliers=self.multipliers,
            layerwise_epochs=self.lw_epochs, decoder_epochs=self.dec_epochs,
            lr=self.lr, model_name=self.init)
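    # Layer-wise autoencoder pretraining of the network on the given samples.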
    def pretrain(self, model, X, fold=None, streams=False, nb_epoch=1):
        cropped_shape = (self.roi_size, self.roi_size)
        self.network = neural.create_network(model, (X.shape[1],) + cropped_shape,
                                             fold, streams)
        ae.pretrain_layerwise(self.network.network, X, nb_epoch=nb_epoch)
def froc_by_epochs(data,
                   blobs,
                   augmented_blobs,
                   rois,
                   folds,
                   network_model,
                   nb_epochs=30,
                   epoch_interval=2):
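    # Measure how the detector FROC evolves with training: for every saved
    # checkpoint (every `epoch_interval` epochs) reload the fold weights, run
    # detection on the test fold and average the resulting FROC curves.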
    network_init = None
    roi_size = 32
    streams = 'none'

    imgs = []
    masks = []
    for i in range(len(data)):
        img, lung_mask = data.get(i, downsample=True)
        sampled, lce, norm = preprocess.preprocess_hardie(img,
                                                          lung_mask,
                                                          downsample=True)
        imgs.append([lce])
        masks.append(lung_mask)
    imgs = np.array(imgs)
    masks = np.array(masks)

    # Hardcoding blob set shapes
    blobs2 = blobs
    blobs = blobs.reshape((len(blobs), 3))

    nb_checkpoints = int(nb_epochs / epoch_interval)
    epochs = np.linspace(epoch_interval, nb_checkpoints * epoch_interval,
                         nb_checkpoints).astype(np.int)

    av_frocs = []
    names = []
    aucs1 = []
    aucs2 = []
    for epoch in epochs:
        frocs = []
        fold = 1
        for tr_idx, te_idx in folds:
            print "Fold {} ...".format(fold)
            X_train, Y_train, X_test, Y_test = neural.create_train_test_sets(
                rois[tr_idx],
                augmented_blobs[tr_idx],
                blobs[tr_idx],
                rois[te_idx],
                augmented_blobs[te_idx],
                blobs[te_idx],
                streams=streams,
                detector=True)

            # load network
            network = neural.create_network(network_model,
                                            X_train.shape,
                                            fold,
                                            streams,
                                            detector=False)
            name = 'data/{}_fold_{}.epoch_{}'.format(network_model, fold,
                                                     epoch)
            network.network.load_weights('{}_weights.h5'.format(name))

            # open network on detector mode
            detector_network = neural.create_network(network_model,
                                                     X_train.shape,
                                                     fold,
                                                     streams,
                                                     detector=True)
            copy_weights(network, detector_network)

            # evaluate network on test
            blobs_te_pred, probs_te_pred = detect_with_network(
                detector_network, imgs[te_idx], masks[te_idx], fold=fold)

            froc = eval.froc(blobs2[te_idx], blobs_te_pred, probs_te_pred)
            frocs.append(froc)
            fold += 1

        names.append('{}, epoch {}'.format(network_model, epoch))
        ops = eval.average_froc(frocs, fppi_range)
        av_frocs.append(ops)
        aucs1.append(util.auc(ops, range(0, 60)))
        aucs2.append(util.auc(ops, range(0, 40)))
        util.save_auc(np.array(range(1, len(aucs1) + 1)) * epoch_interval,
                      aucs1, 'data/{}-auc-0-60'.format(network_model))
        util.save_auc(np.array(range(1, len(aucs2) + 1)) * epoch_interval,
                      aucs2, 'data/{}-auc-0-40'.format(network_model))

    return av_frocs, names
def eval_cnn_detector(data, blobs, augmented_blobs, rois, folds, model):
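    # Train the CNN detector per fold on the augmented blobs, then run it in
    # detector mode over the test images and average the FROC across folds.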
    fold = 1
    network_init = None
    roi_size = 32
    streams = 'none'

    imgs = []
    masks = []
    for i in range(len(data)):
        img, lung_mask = data.get(i, downsample=True)
        sampled, lce, norm = preprocess.preprocess_hardie(img,
                                                          lung_mask,
                                                          downsample=True)
        imgs.append([lce])
        masks.append(lung_mask)
    imgs = np.array(imgs)
    masks = np.array(masks)

    # Hardcoding blob set shapes
    blobs2 = blobs
    blobs = blobs.reshape((len(blobs), 3))

    frocs = []
    for tr_idx, te_idx in folds:
        print "Fold {} ...".format(fold)
        X_train, Y_train, X_test, Y_test = neural.create_train_test_sets(
            rois[tr_idx],
            augmented_blobs[tr_idx],
            blobs[tr_idx],
            rois[te_idx],
            augmented_blobs[te_idx],
            blobs[te_idx],
            streams=streams,
            detector=True)

        network = neural.create_network(model,
                                        X_train.shape,
                                        fold,
                                        streams,
                                        detector=False)
        if network_init is not None:
            network.network.load_weights('data/{}_fold_{}_weights.h5'.format(
                network_init, fold))

        # save network
        name = 'data/{}_fold_{}'.format(model, fold)
        history = network.fit(X_train,
                              Y_train,
                              X_test,
                              Y_test,
                              streams=(streams != 'none'),
                              cropped_shape=(roi_size, roi_size),
                              checkpoint_prefix=name,
                              checkpoint_interval=2,
                              loss='mse')
        network.save(name)

        # open network on detector mode
        network.network.summary()
        detector_network = neural.create_network(model,
                                                 X_train.shape,
                                                 fold,
                                                 streams,
                                                 detector=True)
        detector_network.network.summary()
        copy_weights(network, detector_network)
        #network.network.load_weights('{}_weights.h5'.format(name))
        #network.load(name)

        # evaluate network on test
        blobs_te_pred, probs_te_pred = detect_with_network(detector_network,
                                                           imgs[te_idx],
                                                           masks[te_idx],
                                                           fold=fold)

        froc = eval.froc(blobs2[te_idx], blobs_te_pred, probs_te_pred)
        frocs.append(froc)
        fold += 1

    av_froc = eval.average_froc(frocs, fppi_range)
    return av_froc