Example #1
def __call__(self, sess, tup, save_dir, model):
    """Run sliding-window inference for one case, save the stitched
    prediction as NIfTI and, if a reference segmentation is supplied,
    return the per-label Dice scores."""
    print(tup)
    img = nib.load(tup[0])
    img_data = img.get_fdata()  # get_data() is deprecated in nibabel
    vol_shape = img_data.shape
    data_loader = StandardDataLoaderPowerDoppler(self.stride,
                                                 self.segment_size_in)
    vs, vsegs, vpds = data_loader.vol_s(tup, crop_by=self.crop_by)
    print(self.stride)
    yr_labels, yr = self.evaluate_case(sess, model, self.batch_size, vs,
                                       vsegs, vpds)
    # Keep only the predicted windows whose start voxel lies inside the volume
    vpreds = [
        VolumeSegment(start_voxel=vol.start_voxel, seg_arr=seg_arr)
        for vol, seg_arr in zip(vsegs, yr)
        if np.all(vol.start_voxel - vol_shape < 0)
    ]
    for v_pred in vpreds:
        v_pred.compute_indices(self.segment_size_out, vol_shape)
    # Stitch the windowed predictions back into a full-size volume
    img_handler = ImageHandler()
    pre_arr = img_handler.create_image_from_windows(vpreds, vol_shape)
    img_nii = nib.Nifti1Image(pre_arr.astype(np.float32),
                              affine=img.affine)
    out_name = os.path.join(
        save_dir,
        'pred_' + os.path.basename(tup[0]).split('.')[0] + '.nii.gz')
    nib.nifti1.save(img_nii, out_name)
    if len(tup) == 4:
        # A reference segmentation is available: compute overlap metrics
        seg_img = nib.load(tup[2]).get_fdata().astype(int)
        seg_img_one_hot = np.eye(self.nlabels,
                                 dtype=int)[seg_img.reshape(-1)]
        pred_img_one_hot = np.eye(self.nlabels, dtype=int)[pre_arr.astype(
            np.uint8).reshape(-1)]
        # Per-label Dice similarity coefficient
        dice_arr = 2 * np.sum(seg_img_one_hot * pred_img_one_hot,
                              axis=0) / (np.sum(seg_img_one_hot, axis=0) +
                                         np.sum(pred_img_one_hot, axis=0))
        dice = np.sum(dice_arr)  # Dice summed over labels (not used below)
        # Micro-averaged false/true positive rates over the one-hot encoding
        fpr = np.sum((1 - seg_img_one_hot) *
                     pred_img_one_hot) / np.sum(1 - seg_img_one_hot)
        tpr = np.sum(
            seg_img_one_hot * pred_img_one_hot) / np.sum(seg_img_one_hot)
        print("TPR:", tpr, " FPR: ", fpr, " DSC: ", dice_arr)
        return tuple(dice_arr)
    else:
        return -1
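
The metric block at the end of __call__ one-hot encodes the reference and predicted volumes, then computes a per-label Dice coefficient and micro-averaged TPR/FPR. A minimal self-contained NumPy check of the same arithmetic on toy label vectors (the names and values below are illustrative only, not part of oxnnet):

import numpy as np

nlabels = 3
seg = np.array([0, 1, 1, 2, 2, 0])    # toy reference labels
pred = np.array([0, 1, 2, 2, 2, 0])   # toy predicted labels

seg_oh = np.eye(nlabels, dtype=int)[seg]
pred_oh = np.eye(nlabels, dtype=int)[pred]

# Per-label Dice, exactly as in __call__ above
dice_arr = 2 * np.sum(seg_oh * pred_oh, axis=0) / (
    np.sum(seg_oh, axis=0) + np.sum(pred_oh, axis=0))

# Micro-averaged true/false positive rates over the one-hot encoding
tpr = np.sum(seg_oh * pred_oh) / np.sum(seg_oh)
fpr = np.sum((1 - seg_oh) * pred_oh) / np.sum(1 - seg_oh)

print(dice_arr, tpr, fpr)   # approx. [1.0, 0.67, 0.8], 0.83, 0.083
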
Example #2
import os
import numpy as np
import tensorflow as tf
import tflearn

from oxnnet.data_loader import StandardDataLoaderPowerDoppler
from oxnnet.record import RecordWriter, PowerDopplerProcessTup, RecordReader
from oxnnet.full_inferer import PowerDopplerFullInferer
from oxnnet.feats_writer import StandardFeatsWriter

train_eval_test_no = [75, 15, 10]
segment_size_in = np.array([64] * 3)
segment_size_out = segment_size_in
crop_by = 0
stride = np.array([32] * 3, dtype=int)  # np.int is removed in NumPy >= 1.24
data_loader = StandardDataLoaderPowerDoppler(stride, segment_size_in)



#https://github.com/caglar/noisy_units/blob/master/codes/tf/nunits.py
HardTanh = lambda x: tf.minimum(tf.maximum(x, -1.), 1.)
lin_sigmoid = lambda x: 0.25 * x + 0.5
# Sigmoid = lambda x, use_noise=0: T.nnet.sigmoid(x)
HardSigmoid = lambda x, angle=0.25: tf.maximum(tf.minimum(angle*x + 0.5, 1.0), 0.0)
# The definition below overrides the one above; with angle=0.25 they are equivalent.
HardSigmoid = lambda x: tf.minimum(tf.maximum(lin_sigmoid(x), 0.), 1.)

def NTanh(x, use_noise, alpha=1.05, c=0.5, half_normal=False):
    """
    Noisy Hard Tanh Units: NAN without learning p
    ----------------------------------------------------
    Arguments:
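
The NTanh definition is truncated in this listing, but the hard saturations above it are complete. A minimal check of what HardTanh and HardSigmoid compute, written in the TF 1.x graph/session style implied by the tflearn import (the input values are arbitrary):

import tensorflow as tf

x = tf.constant([-3.0, -0.5, 0.0, 0.5, 3.0])
with tf.Session() as sess:
    print(sess.run(HardTanh(x)))     # approx. [-1.0, -0.5, 0.0, 0.5, 1.0]
    print(sess.run(HardSigmoid(x)))  # approx. [0.0, 0.375, 0.5, 0.625, 1.0]
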
Example #3
import os
import numpy as np

from oxnnet.data_loader import StandardDataLoaderPowerDoppler
from oxnnet.record import RecordWriter, PowerDopplerProcessTup
from oxnnet.full_inferer import PowerDopplerFullInferer
from oxnnet.feats_writer import StandardFeatsWriter

segment_size_in_test = np.array([78] * 3)
segment_size_in = np.array([78] * 3)
segment_size_out = np.array([34] * 3)  #calc_out_shape(segment_size_in) #-16
crop_by = (segment_size_in - segment_size_out) // 2
#train_eval_test_no = [1000,300,1000]
train_eval_test_no = [75, 15, 10]
stride = np.array([34] * 3, dtype=int)  # np.int is removed in NumPy >= 1.24
stride_test = np.array([34] * 3, dtype=int)
batch_size_test = 1
lowest_res_size = 5

data_loader = StandardDataLoaderPowerDoppler(stride,
                                             segment_size_in,
                                             crop_by=crop_by,
                                             aug_pos_samps=True)


def build_record_writer(data_dir, dir_type_flag):
    """Load the dataset layout selected by dir_type_flag into the module-level
    data_loader and return a multi-threaded RecordWriter for it."""
    if dir_type_flag == 'meta':
        data_loader.read_metadata(data_dir)
    elif dir_type_flag == 'deepmedic':
        data_loader.read_deepmedic_dir(data_dir)
    else:
        data_loader.read_data_dir(data_dir, train_eval_test_no)
    return RecordWriter(data_loader,
                        PowerDopplerProcessTup,
                        num_of_threads=os.cpu_count() - 1)
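
A minimal sketch of how this configuration module might be driven; the directory path and flag value are placeholders, and only build_record_writer itself comes from the listing above:

# Hypothetical invocation: 'my_data_dir' is a placeholder path, and any flag
# other than 'meta' or 'deepmedic' falls through to read_data_dir with the
# train/eval/test split defined above.
writer = build_record_writer('my_data_dir', dir_type_flag='standard')
# The returned RecordWriter is wired to the configured data_loader and is
# what the training pipeline would use to write TFRecords.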