def __init__(self, img, level_min=1, level_max=256, threshold=None):
    """Initialize the gray-level histogram analysis container.

    :param img: image to analyze; re-quantized into [level_min, level_max]
        by ``normalize``
    :param level_min: min intensity of normalized image
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    """
    self.img, self.slope, self.intercept = \
        normalize(img, level_min, level_max, threshold)
    self.n_level = (level_max - level_min) + 1
    self.level_min = level_min
    self.level_max = level_max
    # Single histogram pass.  The probability density is derived from the
    # raw counts exactly as np.histogram(..., density=True) computes it
    # (counts / bin_width / total), avoiding a redundant second scan of
    # the image.
    hist, bin_edges = np.histogram(self.img.flatten(),
                                   bins=self.n_level,
                                   range=[level_min, level_max],
                                   density=False)
    self.hist = np.array(hist)
    self.bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2
    bin_widths = np.diff(bin_edges)
    self.p_glha = self.hist / bin_widths / self.hist.sum()
    self.features = self._calc_features()
def __init__(self, img, theta=None, level_min=1, level_max=256, threshold=None):
    """Initialize the 2D co-occurrence texture descriptor.

    :param img: 2D image to analyze; re-quantized into
        [level_min, level_max] by ``normalize``
    :param theta: definition of neighbor (defaults to ``[0, 1]``)
    :param level_min: min intensity of normalized image
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    """
    # Use a None sentinel instead of a mutable default argument: a list
    # default would be one shared object reused across every call.
    if theta is None:
        theta = [0, 1]
    assert len(img.shape) == 2, 'image must be 2D'
    self.img, self.slope, self.intercept = \
        normalize(img, level_min, level_max, threshold)
    self.n_level = (level_max - level_min) + 1
    self.level_min = level_min
    self.level_max = level_max
    self.theta = theta
    self.matrix = self._construct_matrix()
    self.features = self._calc_features()
def __init__(self, img, d=1, level_min=1, level_max=256, threshold=None):
    """Initialize the distance-based 2D texture descriptor.

    :param img: 2D image to analyze; re-quantized into
        [level_min, level_max] by ``normalize``
    :param d: distance
    :param level_min: min intensity of normalized image
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    """
    assert len(img.shape) == 2, 'image must be 2D'
    normalized, slope, intercept = normalize(img, level_min, level_max,
                                             threshold)
    self.img = normalized
    self.slope = slope
    self.intercept = intercept
    self.level_min = level_min
    self.level_max = level_max
    self.n_level = level_max - level_min + 1
    self.d = d
    # Derived matrix and its summary quantities, then the feature vector.
    self.s, self.p, self.ng, self.n2 = self._construct_matrix()
    self.features = self._calc_features()
def __init__(self, img, level_min=1, level_max=256, threshold=None):
    """Initialize the 3D gray-level zone-size descriptor.

    :param img: 3D image to analyze; re-quantized into
        [level_min, level_max] by ``normalize``
    :param level_min: min intensity of normalized image (must be > 0)
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    """
    assert len(img.shape) == 3, 'image must be 3D'
    # Typo fixed in the message ("mast" -> "must").
    assert level_min > 0, 'min level must be greater than 0.'
    self.img, self.slope, self.intercept = \
        normalize(img, level_min, level_max, threshold)
    self.n_level = (level_max - level_min) + 1
    self.min_level = level_min
    self.max_level = level_max
    # Zone-size bounds are populated by _construct_matrix().
    self.min_zone_size = None
    self.max_zone_size = None
    self.matrix, self.zone_sizes = self._construct_matrix()
    self.features = self._calc_features()
def __init__(self, img, d=1, level_min=1, level_max=256, threshold=None):
    """Initialize the 3D distance-based texture descriptor.

    :param img: 3D image to analyze; re-quantized into
        [level_min, level_max] by ``normalize``
    :param d: distance from center (only d == 1 is supported)
    :param level_min: min intensity of normalized image
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    :raises NotImplementedError: if ``d > 1``
    """
    assert len(img.shape) == 3, 'image must be 3D'
    self.img, self.slope, self.intercept = \
        normalize(img, level_min, level_max, threshold)
    self.n_level = (level_max - level_min) + 1
    self.level_min = level_min
    self.level_max = level_max
    self.d = d
    if self.d > 1:
        # NotImplementedError is the idiomatic signal for an unfinished
        # feature; it is a subclass of Exception, so existing broad
        # handlers still catch it.
        raise NotImplementedError("d>1 has not been implemented yet....")
    # Normalize the matrix so entries form a probability distribution.
    self.matrix_non_norm = self._construct_matrix()
    self.matrix = self.matrix_non_norm / self.matrix_non_norm.sum()
    self.features = self._calc_features()
def __init__(self, img, d=1, level_min=1, level_max=127, threshold=None):
    """Initialize the 3D neighborhood gray-tone descriptor.

    :param img: 3D image to analyze; re-quantized into
        [level_min, level_max] by ``normalize``
    :param d: distance (must be > 0)
    :param level_min: min intensity of normalized image (must be > 0)
    :param level_max: max intensity of normalized image
    :param threshold: threshold of the minimal value passed to ``normalize``
    """
    assert len(img.shape) == 3, 'image must be 3D'
    self.img, self.slope, self.intercept = \
        normalize(img, level_min, level_max, threshold)
    # Voxels below the valid range are zeroed so they are excluded from
    # the matrix construction (level 0 is outside [level_min, level_max]).
    self.img[self.img < level_min] = 0
    self.n_level = (level_max - level_min) + 1
    self.level_min = level_min
    self.level_max = level_max
    assert self.level_min > 0, 'lower level must be greater than 0'
    self.d = d
    # Typo fixed in the message ("grater" -> "greater").
    assert self.d > 0, 'd must be greater than 0'
    self.s, self.p, self.ng, self.n2 = self._construct_matrix()
    self.features = self._calc_features()
def main():
    """Batch texture analysis (GLHA/GLCM/NGTDM/GLSZM) for PET VOI dumps.

    Globs ``*ctr*[!_].npy`` VOI arrays plus their ``*_meta_data_.json``
    side-cars under ``--data_dir_path``, computes texture features per
    VOI, and writes one timestamped Excel workbook (``results`` and
    ``parameters`` sheets) into ``--out``.
    """
    parser = argparse.ArgumentParser(
        description='Texture Analysis test tool for PET images')
    parser.add_argument('--num_levels', '-n', type=int, default=64,
                        help='Number of gray levels')
    parser.add_argument('--d_glcm', type=int, default=1,
                        help='Distance parameter value of GLCM')
    parser.add_argument('--d_ngtdm', type=int, default=1,
                        help='Distance parameter value of NGTDM')
    parser.add_argument('--data_dir_path', '-d', type=str,
                        default='./data/PA*',
                        help='Directory of target data files.')
    parser.add_argument('--out', '-o', type=str, default='./results',
                        help='Directory to output the results')
    parser.add_argument('--save_voi_as_dicom', action='store_true',
                        default=False, dest='save_voi_as_dicom',
                        help='Save VOI image as dicom')
    parser.add_argument('--save_matrix_as_png', action='store_true',
                        default=False, dest='save_matrix_as_png',
                        help='Save matrix as image data')
    args = parser.parse_args()

    # makedirs(exist_ok=True) creates missing parents and avoids the
    # exists()/mkdir() race of the original.
    os.makedirs(args.out, exist_ok=True)

    target = os.path.join(args.data_dir_path, 'ST0/*ctr*[!_].npy')
    files = glob.glob(target)
    files.sort()
    print(files)
    files = np.array(files)

    # Hoisted out of the loop: previously `scale` was only bound inside
    # the loop body, so an empty file list made the parameters sheet
    # below raise NameError.
    scale = args.num_levels

    all_data = []
    for fname in files:
        data = np.load(fname)

        # Metadata side-car written together with the VOI dump.
        # (str.replace returns a new string, so no deepcopy is needed.)
        json_fname = fname.replace('.npy', '_meta_data_.json')
        with open(json_fname.replace('\\', '/'), 'r') as f:
            json_data = json.load(f)
        patient_name = json_data['patient_name']
        series_description = json_data['series_description']
        radiopharmaceutical_info = json_data['radiopharmaceutical']
        if radiopharmaceutical_info.find('FDG') >= 0:
            radiopharmaceutical_info = 'FDG'
        elif radiopharmaceutical_info.find('FLT') >= 0:
            radiopharmaceutical_info = 'FLT'
        # Negative index values mean "all ROIs / all contours".
        ref_roi_number = json_data['ref_roi_number'] if (
            json_data['ref_roi_number'] >= 0) else 'all'
        n_voxels = json_data['n_voxels']
        # Read to validate the key is present; currently unused here.
        suv_conversion_coeff = json_data['SUV_conversion_coeff']
        voi_id = json_data['target_ctr_idx'] if (
            json_data['target_ctr_idx'] >= 0) else 'all'
        voi_volume = json_data['roi_volume']

        # Voxels outside the VOI are negative; statistics use the rest.
        voi_min_value = np.unique(data[data >= 0]).min()
        voi_max_value = np.unique(data[data >= 0]).max()
        voi_mean_value = data[data >= 0].mean()
        voi_var_value = data[data >= 0].var()
        voi_info_labels = [
            'name', 'radiopharmaceutical_info', 'roi_series_description',
            'voi_id', 'roi_num', 'data_filename', 'voi_min_value',
            'voi_max_value', 'voi_mean_value', 'voi_var_value',
            'voi_volume [ml]', 'n_voxels'
        ]
        voi_info_values = [
            patient_name, radiopharmaceutical_info, series_description,
            voi_id, ref_roi_number, os.path.basename(fname),
            voi_min_value, voi_max_value, voi_mean_value, voi_var_value,
            voi_volume, n_voxels
        ]
        if n_voxels <= 1:
            print(f"Error!\n Number of voxel was {n_voxels}. {fname}")
            continue

        non_cropped_roi = np.load(fname.replace('.npy', '_non_cropped_.npy'))
        non_masked_roi = np.load(
            fname.replace('.npy', '_non_cropped_non_masked_.npy'))
        non_cropped_roi, _, _ = normalize(non_cropped_roi, 0, scale - 1,
                                          voi_min_value)
        if args.save_voi_as_dicom:
            convert_npy_to_dicom(
                '{}/{}_'.format(args.out, radiopharmaceutical_info) +
                os.path.basename(
                    fname[0:-9] +
                    'roi_no_{}_norm_.dcm'.format(ref_roi_number)),
                non_cropped_roi,
                pixel_spacing=json_data['pixel_spacing'],
                slice_thickness=json_data['slice_thickness'],
            )
            convert_npy_to_dicom(
                '{}/{}_'.format(args.out, radiopharmaceutical_info) +
                os.path.basename(
                    fname[0:-9] +
                    'roi_no_{}_org_.dcm'.format(ref_roi_number)),
                non_masked_roi,
                pixel_spacing=json_data['pixel_spacing'],
                slice_thickness=json_data['slice_thickness'],
            )

        # Texture descriptors.  GLHA/GLCM use levels [0, scale-1];
        # NGTDM/GLSZM require level_min >= 1, hence [1, scale].
        glha = GLHA(data.flatten(), level_min=0, level_max=scale - 1,
                    threshold=voi_min_value)
        glha_labels, glha_values = glha.print_features()
        # Sanity check: every above-threshold voxel is counted exactly once.
        assert glha.hist.sum() == (data >= voi_min_value).sum(), \
            "{} {} {}".format(n_voxels, glha.hist.sum(),
                              (data >= voi_min_value).sum())
        glcm = GLCM_3D(data, d=args.d_glcm, level_min=0,
                       level_max=scale - 1, threshold=voi_min_value)
        glcm_labels, glcm_values = glcm.print_features(show_figure=False)
        ngtdm = NGTDM_3D(data, d=args.d_ngtdm, level_min=1,
                         level_max=scale, threshold=voi_min_value)
        ntdm_labels, ntdm_values = ngtdm.print_features(show_figure=False)
        glszm = GLSZM_3D(data, level_min=1, level_max=scale,
                         threshold=voi_min_value)
        glszm_labels, glszm_values = glszm.print_features(show_figure=False)

        labels = (voi_info_labels + glha_labels + glcm_labels +
                  ntdm_labels + glszm_labels)
        values = (voi_info_values + glha_values + glcm_values +
                  ntdm_values + glszm_values)
        df = pd.DataFrame([values])
        df.columns = labels
        if args.save_matrix_as_png:
            save_mat_as_png(glcm, ngtdm, glszm, glha, patient_name,
                            ref_roi_number, series_description, n_voxels,
                            args.out, radiopharmaceutical_info)
        all_data.append(df)

    if not all_data:
        # pd.concat([]) raises; bail out with a clear message instead.
        print('No valid data files found in {}'.format(target))
        return

    filename = '{}/{}_results.xlsx'.format(
        args.out, datetime.datetime.now().strftime('%Y-%m-%d-%H%M%S'))
    # Context manager replaces the deprecated ExcelWriter.save()
    # (removed in pandas 2.0) and guarantees the workbook is closed
    # even if writing a sheet fails.
    with pd.ExcelWriter(filename) as writer:
        pd.concat(all_data).to_excel(
            writer,
            sheet_name='results',
            index=False,
        )
        conf = pd.DataFrame(
            np.array([
                'Num gray levels: %d' % scale,
                'Distance (GLCM): %d' % args.d_glcm,
                'Distance (NGTDM): %d' % args.d_ngtdm
            ]).reshape(-1, 1))
        conf.to_excel(
            writer,
            sheet_name='parameters',
            header=False,
            index=False,
        )