Example #1
    def write_similarities(self, directory):

        # Store information in array
        similarities_nda = np.zeros((len(self._stacks), len(self._measures)))
        filenames = []
        for i_stack, stack in enumerate(self._stacks):
            similarities_nda[i_stack, :] = np.array(
                [self._similarities[m][i_stack] for m in self._measures])
            filenames.append(stack.get_filename())

        # Build header of files
        header = "# Ref: %s, Ref-Mask: %d, %s \n" % (
            self._reference.get_filename(),
            self._use_reference_mask,
            ph.get_time_stamp(),
        )
        header += "# %s\n" % ("\t").join(self._measures)

        # Get filename paths
        path_to_file_filenames, path_to_file_similarities = self._get_filename_paths(
            directory)

        # Write similarities
        ph.write_to_file(path_to_file_similarities, header)
        ph.write_array_to_file(
            path_to_file_similarities, similarities_nda, verbose=self._verbose)

        # Write stack filenames
        text = header
        text += "%s\n" % "\n".join(filenames)
        ph.write_to_file(path_to_file_filenames, text, verbose=self._verbose)
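
For orientation, both files written above begin with the same two header lines (the reference info line and the tab-joined measure names); the similarities file is then followed by one row of measure values per stack, and the filenames file by one stack filename per line. Roughly (layout only, names and values illustrative):

    # Ref: reference_name, Ref-Mask: 1, <time stamp>
    # PSNR    RMSE    SSIM
    <measure values for stack 1>
    <measure values for stack 2>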
Example #2
    def write_landmarks(landmarks_nda, path_to_file, verbose=0):

        # Reject unsupported landmark file extensions
        extension = ph.strip_filename_extension(path_to_file)[1]
        if extension not in ALLOWED_LANDMARKS:
            raise IOError("Landmark file extension must be of type %s " %
                          ", or ".join(ALLOWED_LANDMARKS))

        # Write landmark coordinates as a space-separated text file
        ph.write_array_to_file(path_to_file,
                               landmarks_nda,
                               delimiter=" ",
                               access_mode="w",
                               verbose=verbose)
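
A minimal usage sketch, assuming the "txt" extension is accepted by ALLOWED_LANDMARKS and that the function is exposed as a static method of the surrounding class:

    import numpy as np

    # three hypothetical 3D landmark coordinates (x, y, z)
    landmarks_nda = np.array([[10.0, 20.5, 5.0],
                              [12.5, 18.0, 7.0],
                              [9.0, 22.0, 6.5]])
    write_landmarks(landmarks_nda, "/tmp/landmarks.txt", verbose=1)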
Example #3
    def write_transform(transform_sitk, path_to_file, verbose=0):

        # Reject unsupported file extensions
        extension = ph.strip_filename_extension(path_to_file)[1]
        if extension not in ALLOWED_TRANSFORMS and \
                extension not in ALLOWED_TRANSFORMS_DISPLACEMENTS:
            raise IOError("Transform file extension must be of type "
                          "%s (transformation) or %s (displacements)" %
                          (", ".join(ALLOWED_TRANSFORMS),
                           ", ".join(ALLOWED_TRANSFORMS_DISPLACEMENTS)))

        # Write a parametric transform file
        if extension in ALLOWED_TRANSFORMS:
            if isinstance(transform_sitk, sitk.Image):
                raise IOError("Cannot convert displacement field (%s) to "
                              "transform (%s)" % (
                                  ", ".join(ALLOWED_TRANSFORMS_DISPLACEMENTS),
                                  ", ".join(ALLOWED_TRANSFORMS),
                              ))

            if isinstance(transform_sitk, sitk.Transform):
                ph.create_directory(os.path.dirname(path_to_file))
                sitk.WriteTransform(transform_sitk, path_to_file)
                if verbose:
                    ph.print_info("Transform written to '%s'" % path_to_file)
            elif isinstance(transform_sitk, np.ndarray):
                ph.write_array_to_file(path_to_file,
                                       transform_sitk,
                                       delimiter=" ",
                                       access_mode="w",
                                       verbose=verbose)
            else:
                raise IOError("Transform must be of type "
                              "sitk.Transform or np.ndarray")
        # Write a displacement field rather than a parametric transform
        else:
            if isinstance(transform_sitk, sitk.Transform):
                raise IOError("Cannot convert transform (%s) to "
                              "displacement field (%s)" % (
                                  ", ".join(ALLOWED_TRANSFORMS),
                                  ", ".join(ALLOWED_TRANSFORMS_DISPLACEMENTS),
                              ))
            elif isinstance(transform_sitk, sitk.Image):
                sitkh.write_nifti_image_sitk(image_sitk=transform_sitk,
                                             path_to_file=path_to_file,
                                             verbose=verbose)
            elif isinstance(transform_sitk, nib.nifti1.Nifti1Image):
                ph.create_directory(os.path.dirname(path_to_file))
                nib.save(transform_sitk, path_to_file)
            else:
                raise IOError("Transform must be of type "
                              "sitk.Image or nibabel.nifti1.Nifti1Image")
Example #4
    def write_slice_similarities(self, directory):
        for i_stack, stack in enumerate(self._stacks):
            stack_name = stack.get_filename()
            path_to_file = os.path.join(directory, "%s.txt" % stack_name)

            # Write header info
            header = "# %s, %s\n" % (stack.get_filename(), ph.get_time_stamp())
            header += "# %s\n" % ("\t").join(self._measures)
            ph.write_to_file(path_to_file, header, verbose=self._verbose)

            # Write array information
            N_slices = self._get_original_number_of_slices(stack)
            array = np.ones((N_slices, len(self._measures))) * self._init_value
            for i_m, m in enumerate(self._measures):
                array[:, i_m] = self._slice_similarities[stack_name][m]
            ph.write_array_to_file(path_to_file, array, verbose=self._verbose)
Example #5
    def _run(self):

        # Create and delete all possibly existing files in the directory
        ph.create_directory(self._dir_tmp, delete_files=True)

        # Write fixed and moving images to the temporary directory
        sitkh.write_nifti_image_sitk(self._fixed_sitk, self._fixed_str)
        sitkh.write_nifti_image_sitk(self._moving_sitk, self._moving_str)

        if self._fixed_sitk_mask is not None:
            sitkh.write_nifti_image_sitk(
                self._fixed_sitk_mask, self._fixed_mask_str)

        if self._moving_sitk_mask is not None:
            sitkh.write_nifti_image_sitk(
                self._moving_sitk_mask, self._moving_mask_str)

        # Write the initial transform estimate, if provided
        if self._transform_init is not None:
            ph.write_array_to_file(
                self._transform_init_str,
                self._transform_init,
                access_mode="a",
                verbose=0)
Example #6
def main():

    # Set print options
    np.set_printoptions(precision=3)
    pd.set_option('display.width', 1000)

    input_parser = InputArgparser(description=".", )
    input_parser.add_filenames(required=True)
    input_parser.add_reference(required=True)
    input_parser.add_reference_mask()
    input_parser.add_dir_output(required=False)
    input_parser.add_measures(
        default=["PSNR", "RMSE", "MAE", "SSIM", "NCC", "NMI"])
    input_parser.add_verbose(default=0)
    args = input_parser.parse_args()
    input_parser.print_arguments(args)

    ph.print_title("Image similarity")
    data_reader = dr.MultipleImagesReader(args.filenames)
    data_reader.read_data()
    stacks = data_reader.get_data()

    reference = st.Stack.from_filename(args.reference, args.reference_mask)

    # Ensure all images occupy the same physical space as the reference
    for stack in stacks:
        try:
            stack.sitk - reference.sitk
        except RuntimeError:
            raise IOError(
                "All provided images must occupy the same image space")

    x_ref = sitk.GetArrayFromImage(reference.sitk)

    # Restrict the evaluation to the reference mask, if provided
    if args.reference_mask is None:
        indices = np.where(x_ref != np.inf)
    else:
        x_ref_mask = sitk.GetArrayFromImage(reference.sitk_mask)
        indices = np.where(x_ref_mask > 0)

    # Each measure is evaluated on the selected (masked) voxels only;
    # the default argument m=m binds the measure name at definition time.
    measures_dic = {
        m: lambda x, m=m: SimilarityMeasures.similarity_measures[m]
        (x[indices], x_ref[indices])
        # unmasked alternative: SimilarityMeasures.similarity_measures[m](x, x_ref)
        for m in args.measures
    }

    observer = obs.Observer()
    observer.set_measures(measures_dic)
    for stack in stacks:
        nda = sitk.GetArrayFromImage(stack.sitk)
        observer.add_x(nda)

    if args.verbose:
        stacks_comparison = [s for s in stacks]
        stacks_comparison.insert(0, reference)
        sitkh.show_stacks(
            stacks_comparison,
            segmentation=reference,
        )

    observer.compute_measures()
    measures = observer.get_measures()

    # Store information in array
    error = np.zeros((len(stacks), len(measures)))
    cols = measures
    rows = []
    for i_stack, stack in enumerate(stacks):
        error[i_stack, :] = np.array([measures[m][i_stack] for m in measures])
        rows.append(stack.get_filename())

    header = "# Ref: %s, Ref-Mask: %d, %s \n" % (
        reference.get_filename(),
        args.reference_mask is not None,
        ph.get_time_stamp(),
    )
    header += "# %s\n" % ("\t").join(measures)

    path_to_file_filenames = os.path.join(args.dir_output, "filenames.txt")
    path_to_file_similarities = os.path.join(args.dir_output,
                                             "similarities.txt")

    # Write to files
    ph.write_to_file(path_to_file_similarities, header)
    ph.write_array_to_file(path_to_file_similarities, error, verbose=False)
    text = header
    text += "%s\n" % "\n".join(rows)
    ph.write_to_file(path_to_file_filenames, text)

    # Print to screen
    ph.print_subtitle("Computed Similarities")
    df = pd.DataFrame(error, rows, cols)
    print(df)

    return 0
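
An invocation of this script could look as follows; the script name is hypothetical and the flag spellings are inferred from the add_* calls above:

    python image_similarity.py \
        --filenames recon_A.nii.gz recon_B.nii.gz \
        --reference reference.nii.gz \
        --reference-mask reference_mask.nii.gz \
        --measures PSNR SSIM NCC \
        --dir-output ./similarity_results \
        --verbose 1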
def main():

    # Read input
    input_parser = InputArgparser(
        description="Script to evaluate the similarity of simulated stack "
        "from obtained reconstruction against the original stack. "
        "This function takes the result of "
        "simulate_stacks_from_reconstruction.py as input.", )
    input_parser.add_filenames(required=True)
    input_parser.add_filenames_masks()
    input_parser.add_dir_output(required=True)
    input_parser.add_suffix_mask(default="_mask")
    input_parser.add_measures(default=["NCC", "SSIM"])
    input_parser.add_option(
        option_string="--prefix-simulated",
        type=str,
        help="Specify the prefix of the simulated stacks to distinguish them "
        "from the original data.",
        default="Simulated_",
    )
    input_parser.add_option(
        option_string="--dir-input-simulated",
        type=str,
        help="Specify the directory where the simulated stacks are. "
        "If not given, it is assumed that they are in the same directory "
        "as the original ones.",
        default=None)
    input_parser.add_slice_thicknesses(default=None)

    args = input_parser.parse_args()
    input_parser.print_arguments(args)

    # --------------------------------Read Data--------------------------------
    ph.print_title("Read Data")

    # Read original data
    filenames_original = args.filenames
    data_reader = dr.MultipleImagesReader(
        file_paths=filenames_original,
        file_paths_masks=args.filenames_masks,
        suffix_mask=args.suffix_mask,
        stacks_slice_thicknesses=args.slice_thicknesses,
    )
    data_reader.read_data()
    stacks_original = data_reader.get_data()

    # Read data simulated from obtained reconstruction
    if args.dir_input_simulated is None:
        dir_input_simulated = os.path.dirname(filenames_original[0])
    else:
        dir_input_simulated = args.dir_input_simulated
    filenames_simulated = [
        os.path.join("%s", "%s%s") %
        (dir_input_simulated, args.prefix_simulated, os.path.basename(f))
        for f in filenames_original
    ]
    data_reader = dr.MultipleImagesReader(filenames_simulated,
                                          suffix_mask=args.suffix_mask)
    data_reader.read_data()
    stacks_simulated = data_reader.get_data()

    # Ensure each original/simulated pair occupies the same physical space
    for i in range(len(stacks_original)):
        try:
            stacks_original[i].sitk - stacks_simulated[i].sitk
        except RuntimeError:
            raise IOError(
                "Images '%s' and '%s' do not occupy the same space!" %
                (filenames_original[i], filenames_simulated[i]))

    similarity_measures = {
        m: SimilarityMeasures.similarity_measures[m]
        for m in args.measures
    }
    similarities = np.zeros(len(args.measures))

    # Evaluate the similarity measures slice-by-slice within the original mask
    for i in range(len(stacks_original)):
        nda_3D_original = sitk.GetArrayFromImage(stacks_original[i].sitk)
        nda_3D_simulated = sitk.GetArrayFromImage(stacks_simulated[i].sitk)
        nda_3D_mask = sitk.GetArrayFromImage(stacks_original[i].sitk_mask)

        path_to_file = os.path.join(
            args.dir_output,
            "Similarity_%s.txt" % stacks_original[i].get_filename())
        text = "# Similarity: %s vs %s (%s)." % (os.path.basename(
            filenames_original[i]), os.path.basename(
                filenames_simulated[i]), ph.get_time_stamp())
        text += "\n#\t" + ("\t").join(args.measures)
        text += "\n"
        ph.write_to_file(path_to_file, text, "w")
        for k in range(nda_3D_original.shape[0]):
            x_2D_original = nda_3D_original[k, :, :]
            x_2D_simulated = nda_3D_simulated[k, :, :]

            # zero slice, i.e. rejected during motion correction
            if np.abs(x_2D_simulated).sum() < 1e-6:
                x_2D_simulated[:] = np.nan
            x_2D_mask = nda_3D_mask[k, :, :]

            indices = np.where(x_2D_mask > 0)

            for m, measure in enumerate(args.measures):
                if len(indices[0]) > 0:
                    similarities[m] = similarity_measures[measure](
                        x_2D_original[indices], x_2D_simulated[indices])
                else:
                    similarities[m] = np.nan
            ph.write_array_to_file(path_to_file, similarities.reshape(1, -1))

    return 0
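
Each original stack thus gets its own Similarity_<stack name>.txt in the output directory: two comment lines identifying the compared files and the tab-joined measures, followed by one row of measure values per slice, with NaN entries where the slice mask is empty or the slice was zeroed (rejected) during motion correction. Roughly (layout only, values illustrative):

    # Similarity: stack1.nii.gz vs Simulated_stack1.nii.gz (<time stamp>).
    #   NCC   SSIM
    <NCC and SSIM for slice 0>
    <NCC and SSIM for slice 1>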