def update_choice(self, **kwargs):
    """Rebuild the mask/pred display overlays after a liver/label choice change.

    Keyword Args:
        liver (bool): if True, show the merged liver+tumor region;
            otherwise show tumor only. Defaults to the current setting.
        label (int): prediction class used in tumor-only mode.
            Defaults to the current setting.
    """
    self.liver = kwargs.get("liver", self.liver)
    self.label = kwargs.get("label", self.label)
    if self.liver:
        # Liver view: classes 1 and 2 are merged into one foreground label.
        mask_classes = [0, [1, 2]]
        pred_classes = [0, [1, 2]]
    else:
        # Tumor view: ground truth keeps class 2, prediction keeps self.label.
        mask_classes = [0, 2]
        pred_classes = [0, self.label]
    # * 2 maps foreground to value 2 (display convention used elsewhere).
    self.mask = array_kits.merge_labels(self.mask_, mask_classes).astype(np.int8) * 2
    self.pred = array_kits.merge_labels(self.pred_, pred_classes).astype(np.int8) * 2
def dump_fp_bbox_from_prediction(label_dirs, pred_dir):
    """Print FP/TP tumor bounding boxes for every prediction volume and
    pickle them all to ``bboxes-<name>.pkl`` beside the prediction directory.

    Args:
        label_dirs: directories searched for the matching segmentation files.
        pred_dir: directory containing ``prediction-*.nii.gz`` volumes.
    """
    pred_dir = Path(pred_dir)
    save_path = pred_dir.parent / "bboxes-{}.pkl".format(pred_dir.parent.name)
    all_bboxes = {}
    counter = 0  # running total of false positives across all cases
    for pred_path in sorted(pred_dir.glob("prediction-*.nii.gz")):
        print(pred_path.name)
        # .stem of "prediction-N.nii.gz" is "prediction-N.nii".
        lab_file = pred_path.stem.replace("prediction", "segmentation")
        lab_path = misc.find_file(label_dirs, lab_file)
        # Keep only the tumor class (label 2) as a binary mask.
        result = array_kits.merge_labels(
            nii_kits.nii_reader(pred_path)[1], [0, 2])
        reference = array_kits.merge_labels(
            nii_kits.nii_reader(lab_path)[1], [0, 2])
        fps, tps = array_kits.find_tp_and_fp(result, reference)
        for x in fps:
            # bbox layout presumably (z1, y1, x1, z2, y2, x2); the second
            # list printed is the box extent — TODO confirm against find_tp_and_fp.
            print(x, [x[3] - x[0], x[4] - x[1], x[5] - x[2]])
        print()
        counter += len(fps)
        for x in tps:
            print(x, [x[3] - x[0], x[4] - x[1], x[5] - x[2]])
        print("#" * 80)
        # Key results by the integer case id parsed from "prediction-N.nii".
        all_bboxes[int(pred_path.stem.replace(".nii", "").split("-")[-1])] = {
            "fps": fps,
            "tps": tps
        }
    print("FPs: {}".format(counter))
    with save_path.open("wb") as f:
        pickle.dump(all_bboxes, f, pickle.HIGHEST_PROTOCOL)
def dump_all_tumor_bbox():
    """Scan every LiTS segmentation, extract per-tumor bounding boxes and
    volumes, and write the summary to ``data/LiTS/tumor_summary.csv``."""
    save_file = Path(__file__).parent.parent / "data/LiTS/tumor_summary.csv"
    # 6-connectivity structuring element for 3D connected components.
    disc = ndi.generate_binary_structure(3, connectivity=1)
    info = collections.defaultdict(list)
    for lits in LiTS_ROOTS:
        for mask_path in sorted(lits.glob("segmentation-*.nii")):
            hdr, mask = nii_kits.nii_reader(mask_path)
            # Voxel spacing read off the NIfTI affine diagonal (srow_* rows).
            sx, sy, sz = hdr["srow_x"][0], hdr["srow_y"][1], hdr["srow_z"][2]
            voxel = abs(sx * sy * sz)  # single-voxel volume
            # Keep only the tumor class (label 2) as a binary mask.
            mask = array_kits.merge_labels(mask, [0, 2])
            labeled, num_obj = ndi.label(mask, disc)
            objs = ndi.find_objects(labeled)
            for i, obj in enumerate(objs):
                bbox = array_kits.slices_to_bbox(obj)
                # Voxel count of component i+1 scaled to physical volume.
                area = np.sum(labeled == i + 1) * voxel
                info["PID"].append(mask_path.name)
                info["TID"].append(i)
                # bbox presumably ordered (z1, y1, x1, z2, y2, x2) — the
                # x/y/z column mapping below relies on it; TODO confirm.
                info["min_x"].append(bbox[2])
                info["min_y"].append(bbox[1])
                info["min_z"].append(bbox[0])
                info["max_x"].append(bbox[5])
                info["max_y"].append(bbox[4])
                info["max_z"].append(bbox[3])
                info["area/cc"].append(area)
                # NOTE(review): if bbox bounds are inclusive this undercounts
                # by one slice — confirm slices_to_bbox's convention.
                info["num_slices"].append(bbox[3] - bbox[0])
                print(mask_path.name, i, bbox, area)
    pd.DataFrame(data=info).to_csv(str(save_file))
def update_case(self, case_path, **kwargs):
    """Load volume/segmentation/prediction for one case and build the
    display arrays ``self.gt``, ``self.mask`` and ``self.pred``.

    Args:
        case_path: a name carrying the case id after the first '-'
            (e.g. "prediction-113"); it is re-normalized below.
        **kwargs: ``liver`` (bool, default True) and ``label`` (int,
            default 2) select which classes are merged into the overlays.

    Raises:
        ValueError: if the normalized name is not a .nii.gz prediction
            (unreachable in practice — see NOTE below).
    """
    self.gt = self.mask = self.mask_ = None
    self.pred = self.pred_ = None
    # Normalize to the canonical prediction file name.
    # NOTE(review): int(...) assumes the token after the first '-' is a bare
    # number (e.g. "prediction-113"); it would raise on "prediction-113.nii.gz".
    case_path = "prediction-{}.nii.gz".format(int(case_path.split("-")[1]))
    # NOTE(review): this condition is always True after the format() above,
    # so the else branch below is dead code.
    if Path(case_path).name.endswith(".nii.gz"):
        ori_file = find_file(self.data_dirs,
                             case_path.replace("prediction", "volume")
                             .replace(".nii.gz", ".nii"))
        lab_file = find_file(self.data_dirs,
                             case_path.replace("prediction", "segmentation")
                             .replace(".nii.gz", ".nii"))
        pred_file = find_file([self.pred_dir], case_path)
        reader = nii_kits.nii_reader
    else:
        raise ValueError("Wrong prediction name: {}".format(case_path))
    # Lazily load the cached liver bounding boxes once.
    if self.liver_range is None and self.bbox_file.exists():
        with self.bbox_file.open("rb") as f:
            self.liver_range = pickle.load(f)
    self.meta, self.gt = reader(ori_file)
    self.pred_ = reader(pred_file)[1].astype(np.int8)
    self.mask_ = reader(lab_file)[1].astype(np.int8)
    self.shape = self.gt.shape
    if self.liver_range is not None:
        # Crop all three volumes to the cached liver range for this case.
        self.bb = self.liver_range[ori_file.name.split(".")[0]][0]
        ranges = slice(self.get_min_idx(), self.get_max_idx() + 1)
        self.gt = self.gt[ranges]
        self.pred_ = self.pred_[ranges]
        self.mask_ = self.mask_[ranges]
    # Window the CT to [-100, 400] (in place), then rescale to uint8 [0, 255].
    np.clip(self.gt, -100, 400, self.gt)
    self.gt = ((self.gt + 100) * (255 / 500)).astype(np.uint8)
    self.liver = kwargs.get("liver", True)
    self.label = kwargs.get("label", 2)
    if self.liver:
        # Liver view: merge classes 1 and 2 into one foreground label;
        # * 2 maps foreground to display value 2.
        self.mask = array_kits.merge_labels(self.mask_, [0, [1, 2]]).astype(np.int8) * 2
        self.pred = array_kits.merge_labels(self.pred_, [0, [1, 2]]).astype(np.int8) * 2
    else:
        # Tumor view: ground truth keeps class 2, prediction keeps self.label.
        self.mask = array_kits.merge_labels(self.mask_, [0, 2]).astype(np.int8) * 2
        self.pred = array_kits.merge_labels(self.pred_, [0, self.label]).astype(np.int8) * 2
    assert self.gt.shape == self.pred.shape and self.gt.shape == self.mask.shape, \
        "gt: {}, mask: {}, pred: {}".format(self.gt.shape, self.mask.shape, self.pred.shape)
def run(mgr):
    """Yield 2D tumor-mask slices of case 113 resized to ``mgr``'s in-plane
    shape, followed by the same slices again in reversed z order.

    Args:
        mgr: object exposing ``bbox`` (crop box) and ``shape``
            (target in-plane size) — presumably a display/session manager.
    """
    _, temp = nii_kits.nii_reader(
        Path(__file__).parent / "model_dir/016_osmn_in_noise"
                                "/prediction/prediction-113.nii.gz")
    # Keep only the tumor class (label 2) as a binary mask.
    temp = arr_ops.merge_labels(temp, [0, 2])
    temp = temp[arr_ops.bbox_to_slices(mgr.bbox)]
    # Nearest-neighbour zoom so axes 1/2 match mgr.shape; axis 0 untouched.
    # NOTE(review): `n` is NOT defined in this function — it must come from
    # module/enclosing scope, otherwise this raises NameError. Confirm its
    # intent (skipping the first n slices) before reuse.
    temp = ndi.zoom(
        temp,
        [1, mgr.shape[0] / temp.shape[1], mgr.shape[1] / temp.shape[2]],
        order=0)[n:]
    # Forward pass then a z-flipped copy, one slice at a time.
    for x in np.concatenate((temp, np.flip(temp, axis=0)), axis=0):
        yield x
def convert_to_tp_dataset(dataset, k_split=5, folds_file="k_folds.txt",
                          seed=None, align=1, padding=0, min_bbox_shape=None,
                          prefix="cls-0tp"):
    """Convert a LiTS dataset split into k-fold TFRecord files of
    tumor bounding-box examples.

    Args:
        dataset: dataset selector passed to get_lits_list.
        k_split: number of folds (one .tfrecord file per fold).
        folds_file: fold-assignment file under data/LiTS/.
        seed: RNG seed used when the folds file must be created.
        align, padding, min_bbox_shape: forwarded to
            array_kits.extract_region when computing the read bbox.
        prefix: output file name prefix.
    """
    file_names = get_lits_list(dataset, False)
    num_images = len(file_names)
    # Re-use (or deterministically create) the k-fold split of the case list.
    k_folds = read_or_create_k_folds(
        Path(__file__).parent.parent / "data/LiTS/{}".format(folds_file),
        file_names, k_split, seed)
    LiTS_records = _get_lits_records_dir()
    label_reader = SubVolumeReader(np.uint8, extend_channel=False)
    counter = 1  # global progress counter across all folds
    for i, fold in enumerate(k_folds):
        output_filename = LiTS_records / "{}-{}-of-{}.tfrecord".format(
            prefix, i + 1, k_split)
        with tf.io.TFRecordWriter(str(output_filename)) as writer:
            for j, image_name in enumerate(fold):
                image_file = LiTS_Dir / image_name
                print("\r>> Converting fold {}, {}/{}, {}/{}".format(
                    i + 1, j + 1, len(fold), counter, num_images), end="")
                seg_file = image_file.parent / image_file.name.replace(
                    "volume", "segmentation")
                label_reader.read(seg_file)
                # Restrict the reader to the (aligned, padded) foreground bbox.
                bbox = array_kits.extract_region(label_reader.image(), align,
                                                 padding, min_bbox_shape)
                label_reader.bbox = bbox.tolist()
                # Indices of slices containing the maximum (tumor) label value.
                tumor_value = np.max(label_reader.image())
                indices = np.where(
                    np.max(label_reader.image(), axis=(1, 2)) == tumor_value)[0]
                label_reader.indices = indices
                # Tumor-only binary mask -> per-tumor boxes.
                tps = array_kits.find_tp(array_kits.merge_labels(
                    label_reader.image(), [0, 2]), split=True)
                # list of tps is sorted by z, y, x
                for example in bbox_to_examples(label_reader, tps):
                    writer.write(example.SerializeToString())
                counter += 1
    print()
def check_tumor_hist(num=10, xrng=(-200, 250), bins=100, yrng=(0, 0.02),
                     show=True, save_path=None):
    """Plot liver-vs-tumor intensity histograms for one LiTS case: once for
    the whole volume, then once per connected tumor component.

    Args:
        num: case number (volume-<num>.nii).
        xrng, bins, yrng: histogram axes configuration.
        show: display each figure interactively.
        save_path: directory for the PNG outputs; falls back to the
            current directory when None.
    """
    name = "volume-{}.nii".format(num)
    image_path = misc.find_file(LiTS_ROOTS, name)
    print(image_path)
    mask_path = image_path.parent / image_path.name.replace(
        "volume", "segmentation")
    _, image = nii_kits.nii_reader(image_path)
    _, mask = nii_kits.nii_reader(mask_path)
    # Whole-volume histogram: liver label 1 vs. tumor label 2.
    analysis_kits.compute_liver_tumor_hist(
        image, mask, 1, 2, "{} - total".format(image_path.stem),
        xrng=xrng, bins=bins, yrng=yrng, show=show,
        save_path=Path(save_path or "") / "0-total.png")
    # For each tumor: label connected components with 6-connectivity.
    disc = ndi.generate_binary_structure(3, connectivity=1)
    labeled, num_obj = ndi.label(array_kits.merge_labels(mask, [0, 2]), disc)
    # Re-encode: clip makes all foreground 1, adding `labeled` leaves the
    # liver at 1 and maps tumor component i (1-based) to label i + 1 + 1,
    # i.e. tumor i gets value i + 2 below.
    mask = np.clip(mask, 0, 1) + labeled
    for i in range(num_obj):
        # Component centroid, used only for the plot title.
        z, y, x = np.mean(np.array(np.where(mask == i + 2)),
                          axis=1).astype(np.int32)
        analysis_kits.compute_liver_tumor_hist(
            image, mask, 1, 2 + i,
            "{} - tumor {} xyz: ({}, {}, {})".format(
                image_path.stem, i + 1, x, y, z),
            xrng=xrng, bins=bins, yrng=yrng, show=show,
            save_path=Path(save_path or "") / "{}-tumor.png".format(i + 1))
def setUp(self):
    """Load the T001 sample pair and prepare image/label for the tests."""
    sample_dir = Path(__file__).parent.parent / "data/LiTS/Samples"
    self.image = mhd_kits.mhd_reader(sample_dir / "T001.mhd")[1]
    self.label = mhd_kits.mhd_reader(sample_dir / "T001_m.mhd")[1]
    # Apply window width/level 450/50, then collapse the raw label values
    # {0, 255, 510} into consecutive class ids.
    self.image = array_kits.aug_window_width_level(self.image, 450, 50)
    self.label = array_kits.merge_labels(self.label, [0, 255, 510])