# Standard and third-party imports used by the examples below. Project-local
# helpers (image_utils, model, utils, acdc_data, sys_config, segment, dice,
# load_and_maybe_process_data, most_recent_recursion, print_grayscale,
# print_coloured) and the path constants (fs_model_path, ws_model_path,
# model_path, base_path, OUTPUT_FOLDER) are assumed to be provided by the
# surrounding project.
import csv
import logging
import os

import h5py
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
import tensorflow as tf


def print_less_images_from_file(file_path,
                                eval_path,
                                desired_measures=['dice']):
    # Expected CSV header:
    # Structure, Cardiac_Phase, Measure, Performance, Value, File

    #Prepare folders for each measure

    source_path = eval_path + "/../"
    print_path = source_path + "/image_save/"
    nii_types = ['ground_truth', 'prediction', 'image']
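    # Each entry in nii_types is expected to be a sub-folder of source_path
    # holding the corresponding NIfTI volumes.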

    with open(file_path, 'r') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            if row['Measure'] in desired_measures:
                #folder_path
                folder_path = print_path + "/" + \
                              row['Cardiac_Phase'] + "/" + \
                              row['Measure'] + "/" + \
                              row['Performance'] + "/"

                os.makedirs(folder_path, exist_ok=True)

                #get the patient number from the file name,
                #e.g. 'patient060_ED.nii.gz' -> '060'
                file_name = row['File']
                p_num = file_name.split('_')[0][-3:]
                logging.info("Saving image for patient {}".format(p_num))

                for nii_type in nii_types:
                    #load the volume and pick slices at 20%, 50% and 80% of its depth
                    nii_path = source_path + nii_type + "/" + file_name
                    nii_file = nib.load(nii_path).get_data()
                    slices = np.multiply([0.2, 0.5, 0.8],
                                         nii_file.shape[-1]).astype(int)

                    for img_idx in slices:

                        image_name = "{}-{}_{:.3f}_{}-{}_{}".format(
                            row['Performance'], row['Measure'],
                            float(row['Value']), p_num, img_idx, nii_type)

                        if nii_type == "image":
                            image_utils.print_grayscale(
                                np.squeeze(nii_file[..., img_idx]),
                                folder_path, image_name)
                        elif nii_type == "prediction":
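                            # Zero out the highest label value before saving
                            # (presumably hiding one class in the colour map)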
                            x = np.squeeze(nii_file[..., img_idx])
                            x[x == np.max(x)] = 0
                            image_utils.print_coloured(x, folder_path,
                                                       image_name)
                        else:
                            image_utils.print_coloured(
                                np.squeeze(nii_file[..., img_idx]),
                                folder_path, image_name)


def main(fs_exp_config, slices, test):
    # Load data
    data = load_and_maybe_process_data(
        input_folder=sys_config.data_root,
        preprocessing_folder=sys_config.preproc_folder,
        mode=fs_exp_config.data_mode,
        size=fs_exp_config.image_size,
        target_resolution=fs_exp_config.target_resolution,
        force_overwrite=False
    )
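    # 'data' provides at least the 'images_train' and 'images_test' arrays
    # that are indexed by slice below.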
    # Get images; compute batch_size only after dropping out-of-range slice
    # indices so it matches the placeholder shape below
    if test:
        slices = slices[slices < len(data['images_test'])]
        images = data['images_test'][slices, ...]
        prefix = 'test'
    else:
        slices = slices[slices < len(data['images_train'])]
        images = data['images_train'][slices, ...]
        prefix = 'train'
    batch_size = len(slices)

    image_tensor_shape = [batch_size] + list(fs_exp_config.image_size) + [1]
    images_pl = tf.placeholder(tf.float32, shape=image_tensor_shape, name='images')
    feed_dict = {
        images_pl: np.expand_dims(images, -1),
    }

    #Get full supervision prediction
    mask_pl, softmax_pl = model.predict(images_pl, fs_exp_config.model_handle, fs_exp_config.nlabels)
    saver = tf.train.Saver()
    init = tf.global_variables_initializer()
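    # TF1-style inference: initialise the variables, then overwrite them with
    # the weights from the best-dice checkpoint and run a single forward pass.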
    with tf.Session() as sess:
        sess.run(init)
        checkpoint_path = utils.get_latest_model_checkpoint_path(fs_model_path,
                                                                 'model_best_dice.ckpt')
        saver.restore(sess, checkpoint_path)
        fs_predictions, _ = sess.run([mask_pl, softmax_pl], feed_dict=feed_dict)

    for i in range(batch_size):
        print_coloured(fs_predictions[i, ...],
                       filepath=OUTPUT_FOLDER,
                       filename='{}{}_fs_pred'.format(prefix, slices[i]))
Example #3
def main(exp_config, batch_size=3):

    # Load data
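    # Scribble-supervised dataset stored as HDF5; 'images_test' and
    # 'masks_test' are indexed by slice below.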
    data = h5py.File(sys_config.project_root + exp_config.scribble_data, 'r')

    slices = np.random.randint(low=0,
                               high=data['images_test'].shape[0],
                               size=batch_size)
    slices = np.sort(np.unique(slices))
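    # Override the random selection with fixed slice indices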
    slices = [80, 275, 370]
    batch_size = len(slices)
    images = data['images_test'][slices, ...]
    masks = data['masks_test'][slices, ...]
    #masks[masks == 0] = 4

    num_recursions = most_recent_recursion(model_path)

    image_tensor_shape = [batch_size] + list(exp_config.image_size) + [1]
    images_pl = tf.placeholder(tf.float32,
                               shape=image_tensor_shape,
                               name='images')
    mask_pl, softmax_pl = model.predict(images_pl, exp_config.model_handle,
                                        exp_config.nlabels)
    #mask_fs_pl, softmax_fs_pl = model.predict(images_pl,  unet2D_bn_modified, 4)
    saver = tf.train.Saver()
    init = tf.global_variables_initializer()

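    # One prediction volume per slice and per recursion (recursion 0 included,
    # hence num_recursions + 1).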
    predictions = np.zeros([batch_size] + list(exp_config.image_size) +
                           [num_recursions + 1])

    feed_dict = {
        images_pl: np.expand_dims(images, -1),
    }
    with tf.Session() as sess:
        sess.run(init)
        pred_size = 0
        for recursion in range(num_recursions + 1):
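            # Prefer the best-dice checkpoint for this recursion; fall back to
            # the latest plain checkpoint if it is not available.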
            try:
                try:
                    checkpoint_path = utils.get_latest_model_checkpoint_path(
                        model_path,
                        'recursion_{}_model_best_dice.ckpt'.format(recursion))
                except Exception:
                    checkpoint_path = utils.get_latest_model_checkpoint_path(
                        model_path,
                        'recursion_{}_model.ckpt'.format(recursion))
                saver.restore(sess, checkpoint_path)
                mask_out, _ = sess.run([mask_pl, softmax_pl],
                                       feed_dict=feed_dict)
                for mask in range(batch_size):
                    predictions[
                        mask, ...,
                        pred_size] = image_utils.keep_largest_connected_components(
                            np.squeeze(mask_out[mask, ...]))
                print("Classified for recursion {}".format(recursion))
                pred_size += 1
            except Exception as e:
                print(e)

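    # Keep only the recursions whose checkpoint was successfully restored.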
    num_recursions = pred_size
    fig = plt.figure()
    num_cols = num_recursions + 3
    # Random-walker (RW) refinement of each recursion's prediction
    path = base_path + "/poster/"
    for recursion in range(num_recursions):
        predictions[...,
                    recursion] = segment(images,
                                         np.squeeze(predictions[...,
                                                                recursion]),
                                         beta=exp_config.rw_beta,
                                         threshold=0)

    for r in range(batch_size):
        #Add the image
        # ax = fig.add_subplot(batch_size, num_cols, 1 + r*num_cols)
        # ax.axis('off')
        # ax.imshow(np.squeeze(images[r, ...]), cmap='gray')
        image_utils.print_grayscale(
            np.squeeze(images[r, ...]), path,
            '{}_{}_image'.format(exp_config.experiment_name, slices[r]))
        #Add the mask
        # ax = fig.add_subplot(batch_size, num_cols, 2 + r*num_cols)
        # ax.axis('off')
        # ax.imshow(np.squeeze(masks[r, ...]), vmin=0, vmax=4, cmap='jet')
        image_utils.print_coloured(
            np.squeeze(masks[r, ...]), path,
            '{}_{}_gt'.format(exp_config.experiment_name, slices[r]))

        #predictions[r, ...] = segment(images, np.squeeze(predictions[r, ...]), beta=exp_config.rw_beta, threshold=0)
        for recursion in range(num_recursions):
            #Add each prediction
            image_utils.print_coloured(
                np.squeeze(predictions[r, ..., recursion]), path,
                '{}_{}_pred_r{}'.format(exp_config.experiment_name, slices[r],
                                        recursion))
            #ax = fig.add_subplot(batch_size, num_cols, 3 + recursion + r*num_cols)
            #ax.axis('off')
            #ax.imshow(np.squeeze(predictions[r, ..., recursion]), vmin=0, vmax=4, cmap='jet')

    # Show the (empty) figure once; the individual images were saved to disk above.
    plt.axis('off')
    plt.show()
Example #4
def main(ws_exp_config, slices, test):
    # Load data
    exp_dir = sys_config.project_root + 'acdc_logdir/' + ws_exp_config.experiment_name + '/'
    base_data = h5py.File(os.path.join(exp_dir, 'base_data.hdf5'), 'r')

    # Get number of recursions
    num_recursions = acdc_data.most_recent_recursion(
        sys_config.project_root + 'acdc_logdir/' +
        ws_exp_config.experiment_name)
    print("Most recent recursion: {}".format(num_recursions))

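    # most_recent_recursion returns the last recursion index; add one to get
    # the number of recursion checkpoints to load.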
    num_recursions += 1
    # Get images; compute batch_size only after dropping out-of-range slice
    # indices so it matches the placeholder and prediction shapes below
    if test:
        slices = slices[slices < len(base_data['images_test'])]
        images = base_data['images_test'][slices, ...]
        gt = base_data['masks_test'][slices, ...]
        prefix = 'test'
    else:
        slices = slices[slices < len(base_data['images_train'])]
        images = base_data['images_train'][slices, ...]
        gt = base_data['masks_train'][slices, ...]
        scr = base_data['scribbles_train'][slices, ...]
        prefix = 'train'
    batch_size = len(slices)

    image_tensor_shape = [batch_size] + list(ws_exp_config.image_size) + [1]
    images_pl = tf.placeholder(tf.float32,
                               shape=image_tensor_shape,
                               name='images')
    feed_dict = {
        images_pl: np.expand_dims(images, -1),
    }

    #Get weak supervision predictions
    mask_pl, softmax_pl = model.predict(images_pl, ws_exp_config.model_handle,
                                        ws_exp_config.nlabels)
    saver = tf.train.Saver()
    init = tf.global_variables_initializer()
    predictions = np.zeros([batch_size] + list(ws_exp_config.image_size) +
                           [num_recursions])
    predictions_klc = np.zeros_like(predictions)
    predictions_rw = np.zeros_like(predictions)
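    # Three variants are stored per recursion: the raw network output, the
    # output after keep_largest_connected_components (klc), and the output
    # after random-walker refinement (rw).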
    with tf.Session() as sess:
        sess.run(init)
        for recursion in range(num_recursions):
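            # Prefer the best-xent checkpoint, then best-dice, then the latest
            # plain checkpoint for this recursion.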
            try:
                try:
                    checkpoint_path = utils.get_latest_model_checkpoint_path(
                        ws_model_path,
                        'recursion_{}_model_best_xent.ckpt'.format(recursion))
                except Exception:
                    try:
                        checkpoint_path = utils.get_latest_model_checkpoint_path(
                            ws_model_path,
                            'recursion_{}_model_best_dice.ckpt'.format(
                                recursion))
                    except Exception:
                        checkpoint_path = utils.get_latest_model_checkpoint_path(
                            ws_model_path,
                            'recursion_{}_model.ckpt'.format(recursion))
                saver.restore(sess, checkpoint_path)
                mask_out, _ = sess.run([mask_pl, softmax_pl],
                                       feed_dict=feed_dict)
                predictions[..., recursion] = mask_out
                for i in range(batch_size):
                    predictions_klc[
                        i, :, :,
                        recursion] = image_utils.keep_largest_connected_components(
                            mask_out[i, ...])

                predictions_rw[..., recursion] = segment(
                    images,
                    np.squeeze(predictions_klc[..., recursion]),
                    beta=ws_exp_config.rw_beta,
                    threshold=0)

                print("Classified for recursion {}".format(recursion))
            except Exception:
                # Fill missing recursions with -1 so they are skipped when printing below
                predictions[..., recursion] = np.full_like(
                    predictions[..., recursion], -1)
                print("Could not find checkpoint for recursion {} - skipping".
                      format(recursion))

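    # Save the input image, the ground truth and, for every restored recursion,
    # the raw, largest-component and random-walker-refined predictions.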
    for i in range(batch_size):
        pref = '{}{}'.format(prefix, slices[i])

        print_grayscale(images[i, ...],
                        filepath=OUTPUT_FOLDER,
                        filename='{}_image'.format(pref))
        print_coloured(gt[i, ...],
                       filepath=OUTPUT_FOLDER,
                       filename='{}_gt'.format(pref))
        for recursion in range(num_recursions):
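            # Skip recursions whose checkpoint could not be restored (filled with -1).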
            if np.max(predictions[i, :, :, recursion]) >= -0.5:
                print_coloured(predictions[i, :, :, recursion],
                               filepath=OUTPUT_FOLDER,
                               filename="{}_ws_pred_r{}".format(
                                   pref, recursion))
                print_coloured(predictions_klc[i, :, :, recursion],
                               filepath=OUTPUT_FOLDER,
                               filename="{}_ws_pred_klc_r{}".format(
                                   pref, recursion))
                print_coloured(predictions_rw[i, :, :, recursion],
                               filepath=OUTPUT_FOLDER,
                               filename="{}_ws_pred_klc_rw_r{}".format(
                                   pref, recursion))
                print("Dice coefficient for slice {} is {}".format(
                    slices[i],
                    dice(predictions_rw[i, :, :, recursion], gt[i, ...])))
        if not test:
            print_coloured(scr[i, ...],
                           filepath=OUTPUT_FOLDER,
                           filename='{}_scribble'.format(pref))