def combine_liver_liver(infolder1, infolder2, outfolder): livers1 = load_segmentations_test(infolder1, prefix='test-segmentation-', suffix='.nii') livers2 = load_segmentations_test(infolder2, prefix='test-segmentation-', suffix='.nii') #print livers1, livers2 assert len(livers1) == len( livers2), 'liver1 number must equal to livers2 number' print 'Total number of livers1: {}\n'.format(len(livers1)) if not os.path.exists(outfolder): os.makedirs(outfolder) for ind in range(len(livers1)): liver1_f = livers1[ind] liver2_f = livers2[ind] liver1_index = os.path.splitext(liver1_f)[0].split('-')[2] liver2_index = os.path.splitext(liver2_f)[0].split('-')[2] assert liver1_index == liver2_index, 'index mismatch' liver1_path = os.path.join(infolder1, liver1_f) liver2_path = os.path.join(infolder2, liver2_f) print 'liver1_path: {}'.format(liver1_path) print 'liver2_path: {}'.format(liver2_path) # load Image liver1_metadata = load_data(liver1_path) liver2_metadata = load_data(liver2_path) assert liver1_metadata is not None, 'liver1 open failed' assert liver2_metadata is not None, 'liver2 open failed' liver1_data = liver1_metadata['image_data'] liver2_data = liver2_metadata['image_data'] ### # keep both liver1_data's liver label and liver2_data's liver label ### print liver1_data.dtype, liver2_data.dtype print np.sum(liver1_data == 0), np.sum(liver1_data == 1), np.sum( liver1_data == 2) print np.sum(liver2_data == 0), np.sum(liver2_data == 1), np.sum( liver2_data == 2) liver1_data += liver2_data liver1_data[liver1_data > 0] = 1 print np.sum(liver1_data == 0), np.sum(liver1_data == 1), np.sum( liver1_data == 2) assert np.sum(liver1_data == 0) == np.sum( liver1_data < 1), 'liver1_data == 0, error' assert np.sum(liver1_data == 1) == np.sum( (liver1_data > 0) & (liver1_data < 2)), 'liver1_data == 1, error' assert np.sum(liver1_data == 2) == np.sum( liver1_data > 1), 'liver1_data == 2, error' print liver1_data.dtype ### save merge results outpath = os.path.join(outfolder, liver1_f) 
print 'Output file will save to: {}\n'.format(outpath) save_data(liver1_data, outpath) print '=== DONE ==='
def merge_liver_lesion(infolder1, infolder2, outfolder): ### # keep liver_data's liver label and lesion_data's lesion label ### livers = load_segmentations_test(infolder1, prefix='test-segmentation-', suffix='.nii') lesions = load_segmentations_test(infolder2, prefix='test-segmentation-', suffix='.nii') #print livers, lesions assert len(livers) == len( lesions), 'liver number must equal to lesions number' print 'Total number of livers: {}\n'.format(len(livers)) if not os.path.exists(outfolder): os.makedirs(outfolder) for ind in range(len(livers)): liver_f = livers[ind] lesion_f = lesions[ind] liver_index = os.path.splitext(liver_f)[0].split('-')[2] lesion_index = os.path.splitext(lesion_f)[0].split('-')[2] assert liver_index == lesion_index, 'index mismatch' liver_path = os.path.join(infolder1, liver_f) lesion_path = os.path.join(infolder2, lesion_f) print 'liver_path: {}'.format(liver_path) print 'lesion_path: {}'.format(lesion_path) # load Image liver_metadata = load_data(liver_path) lesion_metadata = load_data(lesion_path) assert liver_metadata is not None, 'liver open failed' assert lesion_metadata is not None, 'lesion open failed' liver_data = liver_metadata['image_data'] lesion_data = lesion_metadata['image_data'] ### # keep liver_data's liver label and lesion_data's lesion label ### print liver_data.dtype, lesion_data.dtype # get liver only from liver_data, set others to zeros liver_data[liver_data > 1] = 0 # get lesion only from lesion_data, set others to zeros lesion_data[lesion_data < 2] = 0 # move lesion to liver_data, liver will overrie liver_data += lesion_data liver_data[liver_data > 1] = 2 # print np.sum(liver_data==0), np.sum(liver_data == 1), np.sum(liver_data==2) print liver_data.dtype ### save merge results outpath = os.path.join(outfolder, liver_f) print 'Output file will save to: {}\n'.format(outpath) save_data(liver_data, outpath) #print 'Output saved to: {}\n'.format(foutpath) print '=== DONE ==='
def evaluation_thread(self, eval_queue, split_index): """ Evaluation Thread """ data_list = range(split_index[0], split_index[1]) for ind_data in data_list: """ Get Item Infos """ im_path = self.imdb[ind_data]['image'] gt_path = self.imdb[ind_data]['gt'] filename, ext = osp.splitext(osp.split(gt_path)[1]) filename = 'volume-{}'.format(filename.split('-')[1]) filename = '{}_pred{}'.format(filename, ext) if ext in ('.jpg', '.png', '.npy'): prob_path = osp.join(self.output_dir, filename.split('_slice_')[0], 'label', filename) elif ext in ('.nii'): prob_path = osp.join(self.output_dir, 'label', filename) else: print 'error' print gt_path, prob_path """ Load Label and Prob """ gt_metadata = load_data(gt_path, flags=0) prob_metadata = load_data(prob_path, flags=0) assert (gt_metadata is not None) and (prob_metadata is not None), 'load failed' gt_data = gt_metadata['image_data'] voxelspacing = gt_metadata['image_header'].get_zooms()[:3] prob_data = prob_metadata['image_data'] if self.params.CLASS_NUM == 2: gt_data[gt_data > 0] = 1 # add 2 in case #print np.sum(prob_data > 1), np.sum(gt_data > 1) if np.sum(prob_data > 1) == 0: prob_data[0, 0, 0] = 2 """ Calculate the Scores """ liver_scores = get_scores(prob_data >= 1, gt_data >= 1, voxelspacing) lesion_scores = get_scores(prob_data == 2, gt_data == 2, voxelspacing) print "Liver dice", liver_scores[ 'dice'], "Lesion dice", lesion_scores['dice'] eval_queue.put( tuple((im_path, gt_path, liver_scores, lesion_scores)))
def model_combination(infolders, outfolder): models = [] for infolder in infolders: # load each models' images model = load_segmentations_test(infolder, prefix='test-segmentation-', suffix='.nii') if models: assert len(models[len(models) - 1]) == len(model), 'model mismatch' print 'model has {} images\n'.format(len(model)) models.append(model) if not os.path.exists(outfolder): os.makedirs(outfolder) # iterate images for ind_image in xrange(len(models[0])): multi_images_d = [] multi_images_f = [] # iterate load models for ind_model in xrange(len(models)): model = models[ind_model] image_f = model[ind_image] # image_path image_path = os.path.join(infolders[ind_model], image_f) print 'Model {} image_path: {}'.format(ind_model, image_path) # load image data image_metadata = load_data(image_path) assert image_metadata is not None, 'image open failed' image_data = image_metadata['image_data'] multi_images_d.append(image_data) multi_images_f.append(image_f) # iterate check models' images for ind_f in xrange(1, len(multi_images_f)): assert multi_images_f[0] == multi_images_f[ind_f], 'index mismatch' assert multi_images_d[0].shape == multi_images_d[ ind_f].shape, 'image shape mismatch' ### iterate combine models out_image = np.zeros(multi_images_d[0].shape, dtype=multi_images_d[0].dtype) for ind_model in xrange(len(multi_images_d)): image_data = multi_images_d[ind_model] # print image_data.dtype, np.sum(image_data == 0), np.sum(image_data == 1), np.sum(image_data == 2) image_data[image_data > 1] = 10 # print image_data.dtype, np.sum(image_data == 0), np.sum(image_data == 1), np.sum(image_data == 10) out_image += image_data ### process the label out_image[out_image < 1] = 0 out_image[(out_image >= 1) & (out_image < 10)] = 1 out_image[out_image >= 10] = 2 # print out_image.dtype, np.sum(out_image == 0), np.sum(out_image == 1), np.sum(out_image == 2) ### save merge results outpath = os.path.join(outfolder, multi_images_f[0]) print 'Output file will save to: 
{}\n'.format(outpath) save_data(out_image, outpath) print '=== DONE ==='
def compute_image_statistics(image_path_list): """ Input : A list of image path Output: mean_of_images and number_of_images """ mean_of_images = 0.0 var_of_images = 0.0 number_of_images = 0 for im_path in image_path_list: im_metadata = load_data(im_path) im = im_metadata['image_data'] im = hounsfield_unit_window(im, hu_window=[ -200, 300 ]) #cfg.TRAIN.HU_WINDOW = [-200, 300] -95.3622154758 number_of_images += 1 mean_of_images = mean_of_images + (np.mean(im) - mean_of_images) / number_of_images var_of_images = var_of_images + (np.var(im) - var_of_images) / number_of_images std_of_images = np.sqrt(var_of_images) print 'mean:{}, std:{}'.format(mean_of_images, std_of_images) return mean_of_images, std_of_images
def refine_liver(infolder1, infolder2, outfolder): livers1 = load_segmentations_test(infolder1, prefix='test-segmentation-', suffix='.nii') livers2 = load_segmentations_test(infolder2, prefix='test-segmentation-', suffix='.nii') #print livers1, livers2 assert len(livers1) == len( livers2), 'liver1 number must equal to livers2 number' print 'Total number of livers1: {}\n'.format(len(livers1)) if not os.path.exists(outfolder): os.makedirs(outfolder) for ind in range(len(livers1)): liver1_f = livers1[ind] liver2_f = livers2[ind] liver1_index = os.path.splitext(liver1_f)[0].split('-')[2] liver2_index = os.path.splitext(liver2_f)[0].split('-')[2] assert liver1_index == liver2_index, 'index mismatch' liver1_path = os.path.join(infolder1, liver1_f) liver2_path = os.path.join(infolder2, liver2_f) print 'liver1_path: {}'.format(liver1_path) print 'liver2_path: {}'.format(liver2_path) # load Image liver1_metadata = load_data(liver1_path) liver2_metadata = load_data(liver2_path) assert liver1_metadata is not None, 'liver1 open failed' assert liver2_metadata is not None, 'liver2 open failed' liver1_data = liver1_metadata['image_data'] liver2_data = liver2_metadata['image_data'] ### # keep liver1_data's liver label and liver1_data's lesions label # and # add liver2_data's liver label ### print liver1_data.dtype, liver2_data.dtype print np.sum(liver1_data == 0), np.sum(liver1_data == 1), np.sum( liver1_data == 2) print np.sum(liver2_data == 0), np.sum(liver2_data == 1), np.sum( liver2_data == 2) # set liver1_data's lesion to label 10 in order to keep it # set liver2_data's lesion to label 0, in order to remove it liver1_data[liver1_data > 1] = 10 liver2_data[liver2_data > 1] = 0 print np.sum(liver1_data == 0), np.sum(liver1_data == 1), np.sum( liver1_data == 10) print np.sum(liver2_data == 0), np.sum(liver2_data == 1), np.sum( liver2_data == 2) # add liver2_data's liver to liver1_data liver1_data += liver2_data # set label liver1_data[liver1_data <= 0] = 0 
liver1_data[(liver1_data > 0) & (liver1_data < 10)] = 1 liver1_data[liver1_data >= 10] = 2 print liver1_data.dtype print np.sum(liver1_data == 0), np.sum(liver1_data == 1), np.sum( liver1_data == 2) ### save merge results outpath = os.path.join(outfolder, liver1_f) print 'Output file will save to: {}\n'.format(outpath) save_data(liver1_data, outpath) print '=== DONE ==='
def model_average(infolders, outfolder): ''' Label_Path = infolder + 'label' Prob_Path = infolder + 'prob' MA_Path = infolder + 'model_average' ''' models = [] for infolder in infolders: # load each models' images model = load_segmentations_test(os.path.join(infolder, 'label'), prefix='test-segmentation-', suffix='.nii') if models: assert len(models[len(models) - 1]) == len(model), 'model mismatch' print 'model has {} images\n'.format(len(model)) models.append(model) if not os.path.exists(outfolder): os.makedirs(outfolder) # iterate images for ind_image in xrange(len(models[0])): multi_images_d = [] multi_images_f = [] # iterate load models for ind_model in xrange(len(models)): model = models[ind_model] image_f = model[ind_image] prob_c0_f = '{}-class-0{}'.format( os.path.splitext(image_f)[0], os.path.splitext(image_f)[1]) prob_c1_f = '{}-class-1{}'.format( os.path.splitext(image_f)[0], os.path.splitext(image_f)[1]) prob_c2_f = '{}-class-2{}'.format( os.path.splitext(image_f)[0], os.path.splitext(image_f)[1]) # path label_path = os.path.join(infolders[ind_model], 'label', image_f) prob_c0_path = os.path.join(infolders[ind_model], 'prob', prob_c0_f) prob_c1_path = os.path.join(infolders[ind_model], 'prob', prob_c1_f) prob_c2_path = os.path.join(infolders[ind_model], 'prob', prob_c2_f) print 'model {}:\n'.format(ind_model) print 'label_path: {}\n'.format(label_path) print 'prob_c0_path: {}\n'.format(prob_c0_path) print 'prob_c1_path: {}\n'.format(prob_c1_path) print 'prob_c2_path: {}\n'.format(prob_c2_path) # load data prob_c0_metadata = load_data(prob_c0_path) prob_c1_metadata = load_data(prob_c1_path) prob_c2_metadata = load_data(prob_c2_path) assert prob_c0_metadata is not None, 'prob_c0 open failed' assert prob_c1_metadata is not None, 'prob_c1 open failed' assert prob_c2_metadata is not None, 'prob_c2 open failed' prob_c0_data = prob_c0_metadata['image_data'] prob_c1_data = prob_c1_metadata['image_data'] prob_c2_data = prob_c2_metadata['image_data'] prob_data = 
np.concatenate( (prob_c0_data[np.newaxis, ...], prob_c1_data[np.newaxis, ...], prob_c2_data[np.newaxis, ...]), axis=0) multi_images_d.append(prob_data) multi_images_f.append(image_f) # iterate check models' images for ind_f in xrange(1, len(multi_images_f)): assert multi_images_f[0] == multi_images_f[ind_f], 'index mismatch' assert multi_images_d[0].shape == multi_images_d[ ind_f].shape, 'image shape mismatch' ### iterate add models out_image = np.zeros(multi_images_d[0].shape, dtype=multi_images_d[0].dtype) for ind_model in xrange(len(multi_images_d)): out_image += multi_images_d[ind_model] ### average models out_image /= float(len(multi_images_d)) ### argmax and transform the datatype to unint8 out_label = np.argmax(out_image, axis=0) out_label = out_label.astype(np.uint8) ### save out_label outpath = os.path.join(outfolder, multi_images_f[0]) print out_label.dtype print 'Output file will save to: {}\n'.format(outpath) save_data(out_label, outpath) ### process the label # out_image[out_image<1] = 0 # out_image[(out_image>=1)&(out_image<10)] = 1 # out_image[out_image>=10] = 2 # print out_image.dtype, np.sum(out_image == 0), np.sum(out_image == 1), np.sum(out_image == 2) print '=== DONE ==='
def prepare_data_unit(self, im_path, gt_path):
    """ Prepare Data
        Load, HU, Normalize, Subtract Mean, Zoom

    Loads one image (and optionally its ground-truth segmentation),
    optionally merges adjacent 2D slices into a 5-channel stack, applies
    the Hounsfield-unit window and zero-center normalization, and returns
    ``[im_path, gt_path, im, gt]`` (``gt`` is None when ``gt_path`` is).
    """

    def _stack_adjacent(center, adj):
        # 5-channel float32 stack: two slices before, center, two after
        merged = np.zeros((center.shape[0], center.shape[1], 5),
                          dtype=np.float32)
        merged[:, :, 0] = adj[0]
        merged[:, :, 1] = adj[1]
        merged[:, :, 2] = center[:, :, 0]
        merged[:, :, 3] = adj[2]
        merged[:, :, 4] = adj[3]
        return merged

    """ Load Seg and Data Preprocessing """
    if gt_path is None:
        gt = None
    else:
        gt_metadata = load_data(gt_path, flags=0)
        assert gt_metadata is not None, 'gt_metadata is None'
        gt = gt_metadata['image_data'].astype(np.float32, copy=False)
        gt_type = gt_metadata['image_type']

    """ Load Image and Data Preprocessing """
    im_metadata = load_data(im_path, flags=0)
    assert im_metadata is not None, 'im_metadata is None'
    im = im_metadata['image_data']
    im_type = im_metadata['image_type']

    """ ADJACENT: merge neighbouring slices when working on 2D data """
    if self.params.ADJACENT and im_type == "2D":
        im = _stack_adjacent(im, prepare_adj_data(im_path))
    if (gt is not None) and self.params.ADJACENT and gt_type == "2D":
        gt = _stack_adjacent(gt, prepare_adj_data(gt_path))

    im = im.astype(np.float32, copy=False)

    """ Apply Hounsfield Unit Window
        Intensities are windowed to HU_WINDOW to exclude irrelevant
        organs and objects.
    """
    im = hounsfield_unit_window(im, hu_window=self.params.HU_WINDOW)

    """ Zero-center and Normalization """
    im -= self.params.PIXEL_STATISTICS[0]
    im /= self.params.PIXEL_STATISTICS[1]
    # NOTE: image scaling (scale_image / SCALES / MAX_SIZE) is disabled here.

    """ return useful infos """
    return [im_path, gt_path, im, gt]