def read_data(self):
    """Read stacks and their slice transforms from the input directory.

    Image slices are expected to be named ``<filename><prefix_slice><n>.<ext>``
    with one ``<filename><prefix_slice><n>.tfm`` transform per slice.

    Populates ``self._stacks`` (via ``st.Stack.from_slice_filenames``) and
    ``self._slice_transforms_sitk`` (one list of transforms per stack,
    ordered by slice number).

    Raises:
        exceptions.DirectoryNotExistent: if ``self._path_to_directory``
            does not exist.
    """
    if not ph.directory_exists(self._path_to_directory):
        raise exceptions.DirectoryNotExistent(self._path_to_directory)
    abs_path_to_directory = os.path.abspath(self._path_to_directory)

    # Get data filenames of images by finding the prefixes associated
    # to the slices which are build as filename_slice[0-9]+.nii.gz
    pattern = "(" + REGEX_FILENAMES + ")" + \
        self._prefix_slice + "[0-9]+[.]" + REGEX_FILENAME_EXTENSIONS
    p = re.compile(pattern)

    # Match each directory entry exactly once (instead of re-matching for
    # every dict-comprehension access)
    matches = [p.match(f) for f in os.listdir(abs_path_to_directory)]
    dic_filenames = {m.group(1): m.group(0) for m in matches if m}

    # Filenames without filename ending as sorted list
    filenames = natsort.natsorted(
        dic_filenames.keys(), key=lambda y: y.lower())

    # Reduce filenames to be read to selection only
    if self._image_selection is not None:
        filenames = [f for f in self._image_selection if f in filenames]

    self._stacks = [None] * len(filenames)
    self._slice_transforms_sitk = [None] * len(filenames)

    for i, filename in enumerate(filenames):
        # Get slice names associated to stack
        pattern = "(" + filename + self._prefix_slice + \
            ")([0-9]+)[.]" + REGEX_FILENAME_EXTENSIONS
        p = re.compile(pattern)
        slice_matches = [
            p.match(f) for f in os.listdir(abs_path_to_directory)]

        # Dictionary linking slice number with filename (without extension)
        dic_slice_filenames = {
            int(m.group(2)): m.group(1) + m.group(2)
            for m in slice_matches if m
        }

        # Build stack from image and its found slices
        self._stacks[i] = st.Stack.from_slice_filenames(
            dir_input=self._path_to_directory,
            prefix_stack=filename,
            suffix_mask=self._suffix_mask,
            dic_slice_filenames=dic_slice_filenames)

        # Read the associated slice transforms, ordered by slice number
        self._slice_transforms_sitk[i] = [
            sitk.ReadTransform(os.path.join(
                self._path_to_directory,
                "%s.tfm" % dic_slice_filenames[k]))
            for k in sorted(dic_slice_filenames.keys())
        ]
def read_data(self):
    """Read all images (and associated masks, if present) from a directory.

    Masks are identified by the ``self._suffix_mask`` filename suffix and
    excluded from the image list. Populates ``self._stacks`` via
    ``st.Stack.from_filename``.

    Raises:
        exceptions.DirectoryNotExistent: if ``self._path_to_directory``
            does not exist.
    """
    if not ph.directory_exists(self._path_to_directory):
        raise exceptions.DirectoryNotExistent(self._path_to_directory)
    abs_path_to_directory = os.path.abspath(self._path_to_directory)

    # Get data filenames of images without filename extension
    pattern = "(" + REGEX_FILENAMES + ")[.]" + REGEX_FILENAME_EXTENSIONS
    pattern_mask = "(" + REGEX_FILENAMES + ")" + self._suffix_mask + \
        "[.]" + REGEX_FILENAME_EXTENSIONS
    p = re.compile(pattern)
    p_mask = re.compile(pattern_mask)

    # TODO:
    # - If folder contains A.nii and A.nii.gz that ambiguity will not
    #   be detected
    # - exclude potential mask filenames
    # - hidden files are not excluded

    # Single directory listing; each entry is matched once. Mask matches
    # go to the mask dict and are kept out of the image dict.
    dic_filenames = {}
    dic_filenames_mask = {}
    for f in os.listdir(abs_path_to_directory):
        match_mask = p_mask.match(f)
        if match_mask:
            dic_filenames_mask[match_mask.group(1)] = match_mask.group(0)
            continue
        match = p.match(f)
        if match:
            dic_filenames[match.group(1)] = match.group(0)

    # Filenames without filename ending as sorted list
    filenames = natsort.natsorted(
        dic_filenames.keys(), key=lambda y: y.lower())

    self._stacks = [None] * len(filenames)
    for i, filename in enumerate(filenames):
        abs_path_image = os.path.join(abs_path_to_directory,
                                      dic_filenames[filename])
        if filename in dic_filenames_mask.keys():
            abs_path_mask = os.path.join(abs_path_to_directory,
                                         dic_filenames_mask[filename])
        else:
            ph.print_info("No mask found for '%s'." % (abs_path_image))
            abs_path_mask = None
        self._stacks[i] = st.Stack.from_filename(
            abs_path_image,
            abs_path_mask,
            extract_slices=self._extract_slices)
def _get_path_to_potential_mask(self, file_path):
    """Return the absolute path to the mask associated with an image.

    The mask is expected to live next to the image and be named
    ``<filename><suffix_mask>.<ext>``. A candidate is only accepted if it
    has an integer pixel type, so the image itself is not mistaken for its
    mask when ``suffix_mask`` is empty.

    Args:
        file_path: path to the image file.

    Returns:
        Absolute path to the mask, or ``None`` if no valid mask exists.

    Raises:
        exceptions.DirectoryNotExistent: if the image directory is missing.
        IOError: if the image extension is not in ``ALLOWED_EXTENSIONS``.
    """
    # Build absolute path to directory of image
    path_to_directory = os.path.dirname(file_path)
    filename = os.path.basename(file_path)
    if not ph.directory_exists(path_to_directory):
        raise exceptions.DirectoryNotExistent(path_to_directory)
    abs_path_to_directory = os.path.abspath(path_to_directory)

    if not file_path.endswith(tuple(ALLOWED_EXTENSIONS)):
        raise IOError("Input image type not correct. Allowed types %s"
                      % "(" + (", or ").join(ALLOWED_EXTENSIONS) + ")")

    # Strip extension (incl. separating dot) from filename to find the
    # associated mask. Plain slicing instead of re.sub("." + ext, ...)
    # avoids the unescaped-dot regex matching anywhere in the name.
    for ext in ALLOWED_EXTENSIONS:
        if file_path.endswith(ext):
            filename = filename[:-len(ext)]
            if filename.endswith("."):
                filename = filename[:-1]
            break

    # re.escape guards against regex metacharacters in the filename
    pattern_mask = re.escape(filename) + self._suffix_mask + "[.]" + \
        REGEX_FILENAME_EXTENSIONS
    p_mask = re.compile(pattern_mask)
    filename_mask = [
        m.group(0)
        for m in (p_mask.match(f)
                  for f in os.listdir(abs_path_to_directory))
        if m
    ]

    if len(filename_mask) == 0:
        abs_path_mask = None
    else:
        # exclude non-integer valued image as candidate (to avoid using
        # the same image as mask in case of suffix_mask = '')
        candidate = os.path.join(abs_path_to_directory, filename_mask[0])
        candidate_sitk = sitk.ReadImage(candidate)
        if "int" in candidate_sitk.GetPixelIDTypeAsString():
            abs_path_mask = candidate
        else:
            abs_path_mask = None

    return abs_path_mask
def read_data(self):
    """Read each image in ``self._file_paths`` (plus mask, if present).

    For every image path, looks for a sibling file named
    ``<filename><suffix_mask>.<ext>`` and uses it as mask. Populates
    ``self._stacks`` via ``st.Stack.from_filename``.

    Raises:
        exceptions.DirectoryNotExistent: if an image directory is missing.
        IOError: if an image extension is not in ``ALLOWED_EXTENSIONS``.
    """
    self._stacks = [None] * len(self._file_paths)
    for i, file_path in enumerate(self._file_paths):
        # Build absolute path to directory of image
        path_to_directory = os.path.dirname(file_path)
        filename = os.path.basename(file_path)
        if not ph.directory_exists(path_to_directory):
            raise exceptions.DirectoryNotExistent(path_to_directory)
        abs_path_to_directory = os.path.abspath(path_to_directory)

        if not file_path.endswith(tuple(ALLOWED_EXTENSIONS)):
            raise IOError("Input image type not correct. Allowed types %s"
                          % "(" + (", or ").join(ALLOWED_EXTENSIONS) + ")")

        # Strip extension (incl. separating dot) from filename to find the
        # associated mask. Plain slicing instead of re.sub("." + ext, ...)
        # avoids the unescaped-dot regex matching anywhere in the name.
        for ext in ALLOWED_EXTENSIONS:
            if file_path.endswith(ext):
                filename = filename[:-len(ext)]
                if filename.endswith("."):
                    filename = filename[:-1]
                break

        # re.escape guards against regex metacharacters in the filename
        pattern_mask = re.escape(filename) + self._suffix_mask + "[.]" + \
            REGEX_FILENAME_EXTENSIONS
        p_mask = re.compile(pattern_mask)
        filename_mask = [
            m.group(0)
            for m in (p_mask.match(f)
                      for f in os.listdir(abs_path_to_directory))
            if m
        ]

        if len(filename_mask) == 0:
            abs_path_mask = None
        else:
            abs_path_mask = os.path.join(abs_path_to_directory,
                                         filename_mask[0])

        self._stacks[i] = st.Stack.from_filename(
            file_path,
            abs_path_mask,
            extract_slices=self._extract_slices)
def read_data(self):
    """Read all slice transforms (``*.tfm``) from the input directory.

    Transform files are expected to be named
    ``<filename><suffix_slice><n>.tfm``. Populates
    ``self._transforms_sitk`` as ``{filename: {slice_number: transform}}``.

    Raises:
        exceptions.DirectoryNotExistent: if ``self._directory`` is missing.
    """
    if not ph.directory_exists(self._directory):
        raise exceptions.DirectoryNotExistent(self._directory)

    # Create absolute path for directory
    directory = os.path.abspath(self._directory)

    pattern = "(" + REGEX_FILENAMES + \
        ")%s([0-9]+)[.]tfm" % self._suffix_slice
    p = re.compile(pattern)

    # Match each entry once; map (filename, slice number) -> file path
    matches = (p.match(f) for f in os.listdir(directory))
    dic_tmp = {
        (m.group(1), int(m.group(2))): os.path.join(directory, m.group(0))
        for m in matches if m
    }

    fnames = set(k[0] for k in dic_tmp.keys())
    self._transforms_sitk = {fname: {} for fname in fnames}
    for (fname, slice_number), path in dic_tmp.items():
        self._transforms_sitk[fname][slice_number] = \
            self._get_sitk_transform_from_filepath(path)
def run(self):
    """Apply exported motion-correction results to the loaded stacks.

    For each stack, the stack transform ``<stack>.tfm`` (if present) is
    applied, then every slice transform ``<stack><prefix_slice><n>.tfm``
    is composed with the inverse stack transform and applied to the
    respective slice. Slices without a transform file are deleted; stacks
    whose slices were all deleted are removed entirely.

    Raises:
        exceptions.DirectoryNotExistent: if the motion-correction
            directory does not exist.
        RuntimeError: if all stacks end up removed.
    """
    if not ph.directory_exists(self._dir_motion_correction):
        raise exceptions.DirectoryNotExistent(self._dir_motion_correction)
    abs_path_to_directory = os.path.abspath(self._dir_motion_correction)

    for i in range(len(self._stacks)):
        stack_name = self._stacks[i].get_filename()

        # update stack position
        path_to_stack_transform = os.path.join(abs_path_to_directory,
                                               "%s.tfm" % stack_name)
        if ph.file_exists(path_to_stack_transform):
            transform_stack_sitk = sitkh.read_transform_sitk(
                path_to_stack_transform)
            transform_stack_sitk_inv = sitkh.read_transform_sitk(
                path_to_stack_transform, inverse=True)
            self._stacks[i].update_motion_correction(transform_stack_sitk)
            ph.print_info("Stack '%s': Stack position updated" % stack_name)
        else:
            # no stack transform exported -> identity for the composition
            transform_stack_sitk_inv = sitk.Euler3DTransform()

        # update slice positions
        pattern_trafo_slices = stack_name + self._prefix_slice + \
            "([0-9]+)[.]tfm"
        p = re.compile(pattern_trafo_slices)
        slice_matches = (
            p.match(f) for f in os.listdir(abs_path_to_directory))
        dic_slice_transforms = {
            int(m.group(1)): os.path.join(abs_path_to_directory, m.group(0))
            for m in slice_matches if m
        }

        slices = self._stacks[i].get_slices()
        for i_slice in range(self._stacks[i].get_number_of_slices()):
            if i_slice in dic_slice_transforms:
                transform_slice_sitk = sitkh.read_transform_sitk(
                    dic_slice_transforms[i_slice])
                # undo the already applied stack transform so only the
                # slice-specific motion remains
                transform_slice_sitk = \
                    sitkh.get_composite_sitk_affine_transform(
                        transform_slice_sitk, transform_stack_sitk_inv)
                slices[i_slice].update_motion_correction(
                    transform_slice_sitk)
            else:
                # no transform exported for this slice -> was rejected
                self._stacks[i].delete_slice(slices[i_slice])

        # print update information
        ph.print_info("Stack '%s': Slice positions updated "
                      "(%d/%d slices deleted)" % (
                          stack_name,
                          len(self._stacks[i].get_deleted_slice_numbers()),
                          self._stacks[i].sitk.GetSize()[-1],
                      ))

        # delete entire stack if all slices were rejected
        if self._stacks[i].get_number_of_slices() == 0:
            ph.print_info("Stack '%s' removed as all slices were deleted"
                          % stack_name)
            self._stacks[i] = None

    # only return maintained stacks
    self._stacks = [s for s in self._stacks if s is not None]
    if len(self._stacks) == 0:
        raise RuntimeError(
            "All stacks removed. "
            "Did you check that the correct motion-correction directory "
            "was provided?")
def run(self, older_than_v3=False):
    """Apply exported motion-correction results to the loaded stacks.

    For each stack, the stack transform ``<stack>.tfm`` (if present) is
    applied, then each slice transform is composed with the inverse stack
    transform and applied to the respective slice. Slices without a
    transform file are deleted; stacks whose slices were all deleted are
    removed entirely. A ``rejected_slices.json`` file, if present, is
    loaded into ``self._rejected_slices``.

    Args:
        older_than_v3: if True, use the legacy recovery path which reads
            the exported slice images/masks directly. (HACK, 18 Jan 2019:
            results of a previous version where image slices were still
            exported. A bug after stack intensity correction resulted in
            v2v-reg transforms not being part of the final registration
            transforms; thus the exported slice tfm's were flawed and
            cannot be used.)

    Raises:
        exceptions.DirectoryNotExistent: if the motion-correction
            directory does not exist.
        RuntimeError: if the mask suffix cannot be determined (legacy
            path) or all stacks end up removed.
    """
    if not ph.directory_exists(self._dir_motion_correction):
        raise exceptions.DirectoryNotExistent(
            self._dir_motion_correction)
    abs_path_to_directory = os.path.abspath(
        self._dir_motion_correction)

    # Load rejected-slices bookkeeping if it was exported
    path_to_rejected_slices = os.path.join(
        abs_path_to_directory, "rejected_slices.json")
    if ph.file_exists(path_to_rejected_slices):
        self._rejected_slices = ph.read_dictionary_from_json(
            path_to_rejected_slices)
    else:
        self._rejected_slices = None

    for i in range(len(self._stacks)):
        stack_name = self._stacks[i].get_filename()

        if not older_than_v3:
            # update stack position
            path_to_stack_transform = os.path.join(
                abs_path_to_directory, "%s.tfm" % stack_name)
            if ph.file_exists(path_to_stack_transform):
                transform_stack_sitk = sitkh.read_transform_sitk(
                    path_to_stack_transform)
                transform_stack_sitk_inv = sitkh.read_transform_sitk(
                    path_to_stack_transform, inverse=True)
                self._stacks[i].update_motion_correction(
                    transform_stack_sitk)
                ph.print_info(
                    "Stack '%s': Stack position updated" % stack_name)
            else:
                # no stack transform exported -> identity for composition
                transform_stack_sitk_inv = sitk.Euler3DTransform()

            if self._volume_motion_only:
                continue

            # update slice positions
            p = re.compile(
                stack_name + self._prefix_slice + "([0-9]+)[.]tfm")
            slice_matches = (
                p.match(f) for f in os.listdir(abs_path_to_directory))
            dic_slice_transforms = {
                int(m.group(1)): os.path.join(
                    abs_path_to_directory, m.group(0))
                for m in slice_matches if m
            }
            slices = self._stacks[i].get_slices()
            for i_slice in range(self._stacks[i].get_number_of_slices()):
                if i_slice in dic_slice_transforms:
                    transform_slice_sitk = sitkh.read_transform_sitk(
                        dic_slice_transforms[i_slice])
                    # undo the already applied stack transform so only
                    # the slice-specific motion remains
                    transform_slice_sitk = \
                        sitkh.get_composite_sitk_affine_transform(
                            transform_slice_sitk, transform_stack_sitk_inv)
                    slices[i_slice].update_motion_correction(
                        transform_slice_sitk)
                else:
                    self._stacks[i].delete_slice(slices[i_slice])

        else:
            # Legacy (< v3) recovery path: rebuild stack and slices from
            # the exported image/mask files instead of the flawed tfm's.

            # Recover suffix for mask
            pm = re.compile(
                stack_name + self._prefix_slice +
                "[0-9]+[_]([a-zA-Z]+)[.]nii.gz")
            mask_matches = (
                pm.match(f) for f in os.listdir(abs_path_to_directory))
            matches = list(set(m.group(1) for m in mask_matches if m))
            # exactly one suffix must be found (also covers 'none found',
            # which previously crashed with an IndexError)
            if len(matches) != 1:
                raise RuntimeError("Suffix mask cannot be determined")
            suffix_mask = "_%s" % matches[0]

            # Recover stack
            path_to_stack = os.path.join(
                abs_path_to_directory, "%s.nii.gz" % stack_name)
            path_to_stack_mask = os.path.join(
                abs_path_to_directory,
                "%s%s.nii.gz" % (stack_name, suffix_mask))
            stack = st.Stack.from_filename(
                path_to_stack, path_to_stack_mask)

            # Recover slices
            p = re.compile(
                stack_name + self._prefix_slice + "([0-9]+)[.]tfm")
            slice_matches = (
                p.match(f) for f in os.listdir(abs_path_to_directory))
            dic_slice_transforms = {
                int(m.group(1)): os.path.join(
                    abs_path_to_directory, m.group(0))
                for m in slice_matches if m
            }
            slices = self._stacks[i].get_slices()
            for i_slice in range(self._stacks[i].get_number_of_slices()):
                if i_slice in dic_slice_transforms:
                    # replace the ".tfm" suffix exactly (re.sub with an
                    # unescaped dot could match elsewhere in the path)
                    path_base = \
                        dic_slice_transforms[i_slice][:-len(".tfm")]
                    path_to_slice = path_base + ".nii.gz"
                    path_to_slice_mask = \
                        path_base + "%s.nii.gz" % suffix_mask
                    slice_sitk = sitk.ReadImage(path_to_slice)
                    slice_sitk_mask = sitk.ReadImage(path_to_slice_mask)
                    hack = sl.Slice.from_sitk_image(
                        slice_sitk=slice_sitk,
                        slice_sitk_mask=slice_sitk_mask,
                        slice_number=slices[i_slice].get_slice_number(),
                        slice_thickness=slices[
                            i_slice].get_slice_thickness(),
                    )
                    self._stacks[i]._slices[i_slice] = hack
                else:
                    self._stacks[i].delete_slice(slices[i_slice])

            self._stacks[i].sitk = stack.sitk
            self._stacks[i].sitk_mask = stack.sitk_mask
            self._stacks[i].itk = stack.itk
            self._stacks[i].itk_mask = stack.itk_mask

        # print update information
        ph.print_info(
            "Stack '%s': Slice positions updated "
            "(%d/%d slices deleted)" % (
                stack_name,
                len(self._stacks[i].get_deleted_slice_numbers()),
                self._stacks[i].sitk.GetSize()[-1],
            )
        )

        # delete entire stack if all slices were rejected
        if self._stacks[i].get_number_of_slices() == 0:
            ph.print_info(
                "Stack '%s' removed as all slices were deleted"
                % stack_name)
            self._stacks[i] = None

    # only return maintained stacks
    self._stacks = [s for s in self._stacks if s is not None]
    if len(self._stacks) == 0:
        raise RuntimeError(
            "All stacks removed. "
            "Did you check that the correct motion-correction directory "
            "was provided?")
def main():
    """Visualize similarity measures produced by
    evaluate_simulated_stack_similarity.py.

    Reads all ``Similarity_<stack>.txt`` files from ``--dir-input``,
    plots the per-slice similarity values for each stack individually and
    as one combined boxplot. If ``--dir-output`` is given, the figures
    are saved there as PDFs.

    Returns:
        0 on success.

    Raises:
        exceptions.DirectoryNotExistent: if ``--dir-input`` is missing.
    """
    input_parser = InputArgparser(
        description="Script to show the evaluated similarity between "
        "simulated stack from obtained reconstruction and original stack. "
        "This function takes the result of "
        "evaluate_simulated_stack_similarity.py as input. "
        "Provide --dir-output in order to save the results."
    )
    input_parser.add_dir_input(required=True)
    input_parser.add_dir_output(required=False)
    args = input_parser.parse_args()
    input_parser.print_arguments(args)

    if not ph.directory_exists(args.dir_input):
        raise exceptions.DirectoryNotExistent(args.dir_input)

    # --------------------------------Read Data--------------------------------
    pattern = "Similarity_(" + REGEX_FILENAMES + ")[.]txt"
    p = re.compile(pattern)
    dic_filenames = {
        m.group(1): m.group(0)
        for m in (p.match(f) for f in os.listdir(args.dir_input))
        if m
    }

    dic_stacks = {}
    for filename in dic_filenames.keys():
        path_to_file = os.path.join(args.dir_input, dic_filenames[filename])

        # Extract evaluated measures written as header in second line
        # (context manager closes the file deterministically)
        with open(path_to_file) as fh:
            measures = fh.readlines()[1]
        measures = re.sub("#\t", "", measures)
        measures = re.sub("\n", "", measures)
        measures = measures.split("\t")

        # Extract errors
        similarities = np.loadtxt(path_to_file, skiprows=2)

        # Build dictionary holding all similarity information for stack
        dic_stacks[filename] = {
            measures[i]: similarities[:, i] for i in range(len(measures))
        }

    # -----------Visualize stacks individually per similarity measure----------
    ctr = [0]
    N_measures = len(measures)
    rows = 2 if N_measures < 6 else 3
    # matplotlib >= 3.3 requires integer subplot grid dimensions
    cols = int(np.ceil(N_measures / float(rows)))
    filenames = natsorted(dic_stacks.keys(), key=lambda y: y.lower())
    for filename in filenames:
        fig = plt.figure(ph.add_one(ctr))
        fig.clf()
        for m, measure in enumerate(measures):
            ax = plt.subplot(rows, cols, m + 1)
            y = dic_stacks[filename][measure]
            x = range(1, y.size + 1)
            line = plt.plot(x, y)[0]
            line.set_linestyle("")
            line.set_marker(ph.MARKERS[0])
            plt.xlabel("Slice")
            plt.ylabel(measure)
            ax.set_xticks(x)
            if measure in ["SSIM", "NCC"]:
                # bounded measures: fix the scale for comparability
                ax.set_ylim([0, 1])
        plt.suptitle(filename)
        try:
            # Open windows (and also save them) in full screen
            manager = plt.get_current_fig_manager()
            manager.full_screen_toggle()
        except Exception:
            pass
        plt.show(block=False)

        if args.dir_output is not None:
            filename = "Similarity_%s.pdf" % filename
            ph.save_fig(fig, args.dir_output, filename)

    # -----------All in one (meaningful in case of similar scaling)----------
    fig = plt.figure(ph.add_one(ctr))
    fig.clf()
    data = {}
    for m, measure in enumerate(measures):
        for filename in filenames:
            similarities = dic_stacks[filename][measure]
            labels = [filename] * similarities.size
            # "Stack" labels only need to be accumulated once
            if m == 0:
                if "Stack" not in data.keys():
                    data["Stack"] = labels
                else:
                    data["Stack"] = np.concatenate((data["Stack"], labels))
            if measure not in data.keys():
                data[measure] = similarities
            else:
                data[measure] = np.concatenate(
                    (data[measure], similarities))
    df_melt = pd.DataFrame(data).melt(
        id_vars="Stack",
        var_name="",
        value_name=" ",
        value_vars=measures,
    )
    ax = plt.subplot(1, 1, 1)
    sns.boxplot(
        data=df_melt,
        hue="Stack",  # different colors for different "Stack"
        x="",
        y=" ",
        order=measures,
    )
    ax.set_axisbelow(True)
    try:
        # Open windows (and also save them) in full screen
        manager = plt.get_current_fig_manager()
        manager.full_screen_toggle()
    except Exception:
        pass
    plt.show(block=False)
    if args.dir_output is not None:
        ph.save_fig(fig, args.dir_output, "Boxplot.pdf")

    return 0