Example #1
    def generate_transformations(self, nifti_reg_options=' -speeeeed '):
        """
        Generate the transformations needed to build the adjacency matrix S.
        :return: writes the <reference>_<floating>.txt transformations in self.pfo_transformations.
        """
        cmd_1 = 'mkdir -p {0} '.format(self.pfo_warped)
        cmd_2 = 'mkdir -p {0} '.format(self.pfo_transformations)
        os.system(cmd_1)
        os.system(cmd_2)

        for i, j in self.graph_connections:
            fname_i_j = self.subjects_id[i] + '_' + self.subjects_id[j]
            pfi_aff_i_j = jph(self.pfo_transformations, fname_i_j + '.txt')
            pfi_res_i_j = jph(self.pfo_warped, fname_i_j + '.nii.gz')
            if self.list_pfi_registration_masks is None:
                cmd_reg_i_j = 'reg_aladin -ref {0} -flo {1} -aff {2} -res {3} {4} '.format(
                            self.pfi_list_subjects_to_coregister[i], self.pfi_list_subjects_to_coregister[j],
                            pfi_aff_i_j, pfi_res_i_j, nifti_reg_options)
            else:
                cmd_reg_i_j = 'reg_aladin -ref {0} -rmask {1} -flo {2} -fmask {3} -aff {4} -res {5} {6} '.format(
                    self.pfi_list_subjects_to_coregister[i], self.list_pfi_registration_masks[i],
                    self.pfi_list_subjects_to_coregister[j], self.list_pfi_registration_masks[j],
                    pfi_aff_i_j, pfi_res_i_j, nifti_reg_options)
            print(cmd_reg_i_j)
            os.system(cmd_reg_i_j)
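
A minimal usage sketch for the method above: the IcvEstimator class and its constructor signature are taken from the ICV testing fixture further down in these examples; the subject paths here are hypothetical.

# Hedged usage sketch; IcvEstimator appears in the ICV testing fixture below,
# the paths are hypothetical.
list_pfi_sj = ['/data/sj01.nii.gz', '/data/sj02.nii.gz', '/data/sj03.nii.gz']
my_icv_estimator = IcvEstimator(list_pfi_sj, '/data/icv_output')
# Writes one <reference>_<floating>.txt affine per graph connection, plus the
# corresponding warped floating image under the warped folder.
my_icv_estimator.generate_transformations()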
def save_data_into_excel_file_per_subject(sj):

    sj_parameters = pickle.load(open(jph(pfo_subjects_parameters, sj), 'r'))

    study = sj_parameters['study']
    category = sj_parameters['category']
    pfo_records = jph(root_study_rabbits, 'A_data', study, category, sj,
                      'records')
    records_exists = os.path.exists(pfo_records)
    if records_exists:
        # get subject records
        pfi_record = jph(pfo_records, sj + '_record.npy')
        assert os.path.exists(pfi_record)
        # ---------
        store_a_record_in_excel_table(pfi_record, pfi_excel_table_all_data, sj,
                                      study)
        # ---------
        if sj in multi_atlas_subjects:
            pfi_record_template = jph(root_study_rabbits, 'A_data', study,
                                      category, sj, 'records_template',
                                      sj + '_record.npy')
            if os.path.exists(pfi_record_template):
                store_a_record_in_excel_table(pfi_record_template,
                                              pfi_excel_table_all_data, sj,
                                              'Template')
            else:
                msg = 'Record_template folder not present for the subject {} '.format(
                    sj)
                print(msg)

    else:
        msg = 'Record folder not present for the subject {} '.format(sj)
        print(msg)
def iterative_propagator(sp):
    """
    Propagate all the atlases of the multi-atlas onto the target, according to the spot instance data.
    :param sp: instance of the class Spot.
    :return: for each subject sj of the multi-atlas we have the final segmentation and warped image, ready to be stacked:
     'final_{0}_over_{1}_segm.nii.gz'.format(sj, sp.target_name)
     and 'final_{0}_over_{1}_warp.nii.gz'.format(sj, sp.target_name)
    """
    pfo_tmp = sp.scaffoldings_pfo

    # --  AFFINE  --
    affine_propagator(sp)

    # --  NON RIGID  --
    num_nrigid_modalities = len(sp.propagation_options['N_rigid_modalities'])

    if num_nrigid_modalities > 0:
        # -- call non-rigid propagation:
        non_rigid_propagator(sp)

        resulting_segmentations_pfi_list = [
            jph(pfo_tmp, 'segm_moving_nrigid_warp_{0}_on_target_{1}.nii.gz'.format(sj, sp.target_name))
            for sj in sp.atlas_list_charts_names]
    else:
        resulting_segmentations_pfi_list = [
            jph(pfo_tmp, 'segm_moving_aff_warp_{0}_on_target_{1}.nii.gz'.format(sj, sp.target_name))
            for sj in sp.atlas_list_charts_names]

    # -- SMOOTHING RESULTING SEGMENTATION --
    if sp.propagation_options['Final_smoothing_factor'] > 0 and sp.propagation_controller['Smooth_results']:
        for p in resulting_segmentations_pfi_list:
            assert os.path.exists(p), p
            p_new = p.replace('.nii.gz', '_SMOL.nii.gz')
            cmd = 'seg_maths {0} -smol {1} {2}'.format(p, sp.propagation_options['Final_smoothing_factor'], p_new)
            print_and_run(cmd)
Example #4
def extract_brain_tissue_in_NI_multi_atlas():
    """
    From the existing multi-atlas with the parcellation, this method extracts the binary mask for the brain tissue.
    This is performed for each subject.
    Multi-atlas considered is the one located at the global variable root_atlas
    :return:
    """

    for atlas_sj in defs.multi_atlas_subjects:

        print('Creating brain tissue for subject {} in NI multi atlas '.format(atlas_sj))

        pfi_segm = jph(defs.root_atlas, atlas_sj, 'segm', '{}_segm.nii.gz'.format(atlas_sj))
        assert os.path.exists(pfi_segm)

        pfi_brain_tissue = jph(defs.root_atlas, atlas_sj, 'masks', '{}_brain_tissue.nii.gz'.format(atlas_sj))

        print_and_run('cp {0} {1}'.format(pfi_segm, pfi_brain_tissue))

        cmd = 'seg_maths {0} -bin {0}; '   \
              'seg_maths {0} -dil 1 {0}; ' \
              'seg_maths {0} -fill {0}; '  \
              'seg_maths {0} -ero 1 {0} '.format(pfi_brain_tissue)

        print_and_run(cmd)
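
For reference, the seg_maths chain above (binarise, dilate, fill, erode) is standard binary morphology. A minimal in-memory sketch with scipy.ndimage, assuming the segmentation is already loaded as a numpy array (binary_fill_holes is a close analogue of -fill):

import numpy as np
from scipy import ndimage

def brain_tissue_mask_from_segm(segm_data):
    # Mirrors the seg_maths chain: -bin, -dil 1, -fill, -ero 1.
    mask = segm_data > 0                                 # -bin
    mask = ndimage.binary_dilation(mask, iterations=1)   # -dil 1
    mask = ndimage.binary_fill_holes(mask)               # -fill (close analogue)
    mask = ndimage.binary_erosion(mask, iterations=1)    # -ero 1
    return mask.astype(np.uint8)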
def test_save_multi_labels_descriptor_custom():
    # load it into a labels descriptor manager
    ldm_lr = LabelsDescriptorManager(
        jph(pfo_tmp_test, 'labels_descriptor_RL.txt'))

    # save it as labels descriptor text file
    pfi_multi_ld = jph(pfo_tmp_test, 'multi_labels_descriptor_LR.txt')
    ldm_lr.save_as_multi_label_descriptor(pfi_multi_ld)

    # expected lines:
    expected_lines = [['background', 0], ['label A Left', 1],
                      ['label A Right', 2], ['label A', 1, 2],
                      ['label B Left', 3], ['label B Right', 4],
                      ['label B', 3, 4], ['label C', 5], ['label D', 6],
                      ['label E Left', 7], ['label E Right', 8],
                      ['label E', 7, 8]]

    # load saved labels descriptor
    with open(pfi_multi_ld, 'r') as g:
        multi_ld_lines = g.readlines()

    # convert to a list of lists, shaped like the expected lines.
    multi_ld_lines_a_list_of_lists = [[
        int(a) if a.isdigit() else a
        for a in [n.strip() for n in m.split('&') if not n.startswith('#')]
    ] for m in multi_ld_lines]
    # Compare:
    for li1, li2 in zip(expected_lines, multi_ld_lines_a_list_of_lists):
        assert li1 == li2
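
The comprehension above splits each non-comment line on '&', so the multi-labels descriptor file is assumed to look like this (illustrative content matching the expected lines):

# illustrative first lines of multi_labels_descriptor_LR.txt
background & 0
label A Left & 1
label A Right & 2
label A & 1 & 2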
Example #6
def from_values_below_S0_in_DWI_to_struct_per_sj(sj):
    """
    struct is a 3 x timepoints matrix with
    [ upper quartile_tp ]_tp
    [ median_tp         ]_tp
    [ lower quartile_tp ]_tp

    :param sj: subject_id
    :return:
    """
    print('from_values_below_S0_in_DWI_to_struct_per_sj {}'.format(sj))
    pfi_input_values = jph(root_output, '{}_below_S0_values.pickle'.format(sj))
    with open(pfi_input_values, 'r') as handle:
        data_sj = pickle.load(handle)
    timepoints = len(data_sj.keys())

    struct = np.zeros([3, timepoints])

    for k_id, k in enumerate(data_sj.keys()):
        print('--- {}'.format(k))
        struct[0, k_id] = np.percentile(data_sj[k], 75)
        struct[1, k_id] = np.median(data_sj[k])
        struct[2, k_id] = np.percentile(data_sj[k], 25)

    pfi_output = jph(root_output, '{}_struct.txt'.format(sj))
    np.savetxt(pfi_output, struct)
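
A read-back sketch for the saved structure, reusing jph, root_output and a subject id sj from the function above:

# Reload the 3 x timepoints matrix saved with np.savetxt above; rows are
# upper quartile, median and lower quartile per timepoint.
struct = np.loadtxt(jph(root_output, '{}_struct.txt'.format(sj)))
upper_quartile_per_tp = struct[0, :]
median_per_tp         = struct[1, :]
lower_quartile_per_tp = struct[2, :]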
def run_create_flipped_multi_atlas(phases):

    if phases[1]:
        # Phase 1) copy the atlas in the folder pfo_atlas_validation_leave_one_out
        cmd = 'mkdir {}'.format(
            path_manager.pfo_atlas_validation_leave_one_out)
        print_and_run(cmd)
        for d in os.listdir(path_manager.pfo_multi_atlas):
            if not d.startswith('.') and not d.startswith('z'):
                cmd = 'cp -r {} {}'.format(
                    jph(path_manager.pfo_multi_atlas, d),
                    path_manager.pfo_atlas_validation_leave_one_out)
                print(cmd)
                print_and_run(cmd)

    if phases[2]:
        # Phase 2) Flip the multi-atlas in the same folder.
        print(path_manager.atlas_subjects)
        print(path_manager.pfo_atlas_validation_leave_one_out)
        suffix_atlas = 'flip'

        dlm = LdM(
            jph(path_manager.pfo_atlas_validation_leave_one_out,
                'labels_descriptor.txt'))

        flipper(path_manager.pfo_atlas_validation_leave_one_out,
                path_manager.atlas_subjects, suffix_atlas, dlm)
Example #8
    def _check_multi_atlas_structure(self):
        if self.parameters_tag == '' or self.parameters_tag is None or '_' in self.parameters_tag:
            msg = 'parameters_tag cannot be an empty string or None, and cannot contain underscores.'
            raise IOError(msg)
        msg = ''
        for chart_name in self.atlas_list_charts_names:
            for mod_j in self.atlas_list_suffix_modalities:
                p = jph(self.atlas_pfo, chart_name,
                        self.arch_modalities_name_folder,
                        '{0}_{1}.nii.gz'.format(chart_name, mod_j))
                if not os.path.exists(p):
                    msg += 'File {} does not exist. \n'.format(p)
            for mask_id, mask_j in enumerate(self.arch_suffix_masks):
                p = jph(self.atlas_pfo, chart_name,
                        self.arch_masks_name_folder,
                        '{0}_{1}.nii.gz'.format(chart_name, mask_j))
                if not os.path.exists(p):
                    msg += 'File {} does not exist. \n'.format(p)
                    if mask_id == 2 and \
                            (self.propagation_options['N_rigid_slim_reg_mask'] or
                             self.propagation_options['Affine_slim_reg_mask']):
                        msg += '\nFile with brain_mask {} for the slim mask creation required but not present. \n'.format(p)
            p = jph(self.atlas_pfo, chart_name,
                    self.arch_segmentations_name_folder,
                    '{0}_{1}.nii.gz'.format(chart_name,
                                            self.atlas_segmentation_suffix))
            if not os.path.exists(p):
                msg += 'File {} does not exist. \n'.format(p)
        if msg != '':
            raise IOError(msg)
    def _initialise_paths(self):
        sj_parameters = pickle.load(
            open(jph(pfo_subjects_parameters, self.subject_name), 'r'))
        study = sj_parameters['study']
        category = sj_parameters['category']
        self.pfo_subject = jph(root_study_rabbits, 'A_data', study, category,
                               self.subject_name)
        self.pfo_report = jph(self.pfo_subject, 'report_stereotaxic')
def test_signature_for_variable_convention_wrong_input_after_initialisation():
    my_ldm = LabelsDescriptorManager(jph(pfo_tmp_test,
                                         'labels_descriptor.txt'),
                                     labels_descriptor_convention='itk-snap')

    with pytest.raises(IOError):
        my_ldm.convention = 'spam'
        my_ldm.save_label_descriptor(
            jph(pfo_tmp_test, 'labels_descriptor_again.txt'))
def test_get_pfi_in_pfi_out():

    tail_a = jph(root_dir, 'tests')
    tail_b = root_dir
    head_a = 'test_auxiliary_methods.py'
    head_b = 'head_b.txt'

    assert_array_equal(get_pfi_in_pfi_out(head_a, None, tail_a, None), (jph(tail_a, head_a), jph(tail_a, head_a)))
    assert_array_equal(get_pfi_in_pfi_out(head_a, head_b, tail_a, None), (jph(tail_a, head_a), jph(tail_a, head_b)))
    assert_array_equal(get_pfi_in_pfi_out(head_a, head_b, tail_a, tail_b), (jph(tail_a, head_a), jph(tail_b, head_b)))
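
From the three assertions, the input/output contract of get_pfi_in_pfi_out can be reconstructed. A minimal sketch consistent with the test, not necessarily the library's actual implementation:

import os

def get_pfi_in_pfi_out(filename_in, filename_out, pfo_in, pfo_out):
    # The input path is always pfo_in/filename_in.
    pfi_in = os.path.join(pfo_in, filename_in)
    # The output defaults to the input name and the input folder.
    pfi_out = os.path.join(pfo_out if pfo_out is not None else pfo_in,
                           filename_out if filename_out is not None else filename_in)
    return pfi_in, pfi_out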
Example #12
    def _check_target_structure(self):
        msg = ''
        # Check modalities:
        list_mods = list(
            set(self.propagation_options['Affine_modalities'] +
                self.propagation_options['N_rigid_modalities']))
        for mod_j in list_mods:
            p = jph(self.target_pfo, self.arch_modalities_name_folder,
                    '{0}_{1}.nii.gz'.format(self.target_name, mod_j))
            if not os.path.exists(p):
                msg += 'File {} does not exist. \n'.format(p)
        # Check the modalities are in the multi-atlas:
        assert set(list_mods).union(set(
            self.atlas_list_suffix_modalities)) == set(
                self.atlas_list_suffix_modalities)
        # Check the single roi mask for all modalities:
        p = jph(self.target_pfo, self.arch_masks_name_folder,
                '{0}_{1}.nii.gz'.format(self.target_name,
                                        self.arch_suffix_masks[0]))
        if not os.path.exists(p):
            msg += 'File {} does not exist. \n'.format(p)
        # Check the specific reg mask for each modality, or the single reg mask:
        list_mod_reg_masks = list(
            set(self.propagation_options['Affine_reg_masks'] +
                self.propagation_options['N_rigid_reg_masks']))
        if len(list_mod_reg_masks) > 0:
            for mask_mod_j_category in list_mod_reg_masks:
                p = jph(self.target_pfo, self.arch_masks_name_folder,
                        '{0}_{1}_{2}.nii.gz'.format(self.target_name,
                                                    mask_mod_j_category,
                                                    self.arch_suffix_masks[1]))
                if not os.path.exists(p):
                    msg += 'File {} does not exist. \n'.format(p)
        else:
            p = jph(self.target_pfo, self.arch_masks_name_folder,
                    '{0}_{1}.nii.gz'.format(self.target_name,
                                            self.arch_suffix_masks[1]))
            if not os.path.exists(p):
                msg += 'File {} does not exist. \n'.format(p)
        if (self.propagation_options['N_rigid_slim_reg_mask'] or
                self.propagation_options['Affine_slim_reg_mask']):
            p = jph(self.target_pfo, self.arch_masks_name_folder,
                    '{0}_{1}.nii.gz'.format(self.target_name,
                                            self.arch_suffix_masks[2]))
            if not os.path.exists(p):
                msg += 'File with brain_mask for the slim mask creation {} required but not present. \n'.format(p)
        if msg != '':
            raise IOError(msg)
def test_generate_none_list_colour_triples():
    generate_dummy_label_descriptor(jph(pfo_tmp_test, 'labels_descriptor.txt'),
                                    list_labels=range(5),
                                    list_roi_names=['1', '2', '3', '4', '5'],
                                    list_colors_triplets=None)
    loaded_dummy_ldm = LabelsDescriptorManager(
        jph(pfo_tmp_test, 'labels_descriptor.txt'))
    for k in loaded_dummy_ldm.dict_label_descriptor.keys():
        assert len(loaded_dummy_ldm.dict_label_descriptor[k][0]) == 3
        for k_rgb in loaded_dummy_ldm.dict_label_descriptor[k][0]:
            assert 0 <= k_rgb < 256
Example #14
def test_check_pfi_io():
    assert check_pfi_io(root_dir, None)
    assert check_pfi_io(root_dir, root_dir)

    non_existing_file = jph(root_dir, 'non_existing_file.txt')
    file_in_non_existing_folder = jph(
        root_dir, 'non_existing_folder/non_existing_file.txt')

    with assert_raises(IOError):
        check_pfi_io(non_existing_file, None)
    with assert_raises(IOError):
        check_pfi_io(root_dir, file_in_non_existing_folder)
def merge_two_study_folders(pfo_main_study, pfo_secondary_study):
    """
    Move the contents of the secondary study folder into the main study folder.
    :param pfo_main_study: path to the main study folder.
    :param pfo_secondary_study: path to the secondary study folder.
    :return:
    """
    for name_to_be_moved in list(set(os.listdir(pfo_secondary_study)) - {'.DS_Store'}):
        name_after_move = name_to_be_moved
        cmd = 'mv {} {}'.format(jph(pfo_secondary_study, name_to_be_moved), jph(pfo_main_study, name_after_move))
        print_and_run(cmd)
    print_and_run('rm -r {}'.format(pfo_secondary_study))
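
An in-process alternative to the shell commands above, with the same semantics (move everything except .DS_Store, then delete the secondary folder); a sketch based on shutil:

import os
import shutil

def merge_two_study_folders_in_process(pfo_main_study, pfo_secondary_study):
    # Move every entry except the macOS .DS_Store file into the main study.
    for name in set(os.listdir(pfo_secondary_study)) - {'.DS_Store'}:
        shutil.move(os.path.join(pfo_secondary_study, name),
                    os.path.join(pfo_main_study, name))
    # Remove the now-emptied secondary study folder.
    shutil.rmtree(pfo_secondary_study)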
def get_g_ratio_per_subjects_as_data_frame(input_subjects_list):
    """
    Each subject has to be provided with a record in the appropriate folder
    From the records of a range of subjects it extracts the data frame with the g-ratio, for the required modalities.

    NOTE: for the g-ratio, the data should be collected individually and not from the record, as
    there is an experimentation phase in progress...

    A record is a data structure as:

    record = {'Info'      : subject_info,
              'Regions'   : regions,
              'LabelsID'  : values_list,
              'NumVoxels' : voxels,
              'vols'      : vols,
              'FAs'       : FAs,
              'ADCs'      : ADCs,
              'g_ratios'  : g_ratios}

    :param input_subjects_list:
    :return: regions_over_g_ratio, a matrix num_regions x subjects having
        regions_over_g_ratio[reg_i, subj_j] = g_ratio per region reg_i of the subject subj_j
    """
    # -- fetch the path to record for each subject in a list:
    records = []

    for sj in input_subjects_list:

        sj_parameters = pickle.load(open(jph(pfo_subjects_parameters, sj), 'r'))

        study = sj_parameters['study']
        category = sj_parameters['category']

        pfi_record_sj = jph(root_study_rabbits, 'A_data', study, category, sj, 'records', sj + '_records.npy')
        assert os.path.exists(pfi_record_sj), 'Subject {} has no record available'.format(sj)
        records.append(np.load(pfi_record_sj).item())

    # --  check all the subjects have the regions label provided in the same order:
    regions_sj0 = records[0]['Regions']
    for rec in records[1:]:
        regions_sj = rec['Regions']
        assert regions_sj == regions_sj0

    # --  get the matrix:
    regions_over_g_ratio = np.zeros([len(regions_sj0), len(input_subjects_list)], dtype=np.float64)

    # TODO

    # --  create the table and the header:

    return regions_over_g_ratio, regions_sj0, input_subjects_list
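
The TODO above leaves the matrix unfilled. If the records were nonetheless used (the docstring notes the g-ratio data are still experimental), a hedged sketch of the filling step that would sit at the TODO, under the unconfirmed assumption that rec['g_ratios'] stores one value per region in the same order as rec['Regions']:

    for subj_j, rec in enumerate(records):
        for reg_i in range(len(regions_sj0)):
            # Layout assumption: one g-ratio per region, ordered as 'Regions'.
            regions_over_g_ratio[reg_i, subj_j] = rec['g_ratios'][reg_i]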
def open_all_T1_with_semg(sj_name_list,
                          pfo_folder_data=path_manager.pfo_multi_atlas):
    for sj in sj_name_list:
        pfi_T1 = jph(pfo_folder_data, sj, 'mod', '{}_T1.nii.gz'.format(sj))
        pfi_segm = jph(pfo_folder_data, sj, 'segm',
                       '_{}_approved.nii.gz'.format(sj))
        pfi_label_descriptor = jph(pfo_folder_data, 'labels_descriptor.txt')

        cmd = 'itksnap -g {0} '.format(pfi_T1)
        if os.path.exists(pfi_segm):
            cmd += ' -s {0} '.format(pfi_segm)
        if os.path.exists(pfi_label_descriptor):
            cmd += ' -l {0} '.format(pfi_label_descriptor)
        os.system(cmd)
Example #18
def unzip_single_sj(sj, controller):

    print('- Unzip subject {} '.format(sj))

    if sj not in list_all_subjects(pfo_subjects_parameters):
        raise IOError(
            'Subject {} does not have a subject parameter ready.'.format(sj))

    sj_parameters = pickle.load(open(jph(pfo_subjects_parameters, sj), 'r'))

    study = sj_parameters['study']
    category = sj_parameters['category']

    pfi_input_sj_zip = jph(root_study_rabbits, '00_raw_data_zipped', study,
                           category, '{}.zip'.format(sj))
    assert os.path.exists(
        pfi_input_sj_zip), 'Zipped file {} does not exist'.format(
            pfi_input_sj_zip)
    pfo_output = jph(root_study_rabbits, '01_raw_data_unzipped_TMP', study,
                     category)

    unzipper_given_pfi_input_and_pfo_output(pfi_input_sj_zip, pfo_output, sj,
                                            controller)

    # Check for external files
    # |---> Secondary study to be merged. If any unzip it as well.
    sj_exts = sj_parameters['merge_with']
    if sj_exts is not None:
        print(
            '\nExternal files related to subject {} found. Unzipping started.'
            .format(sj))
        for sj_ext in sj_exts:
            print('Unzipping file {} for subject {}'.format(sj_ext, sj))
            pfi_input_sj_ext_zip = jph(root_study_rabbits,
                                       '00_raw_data_zipped', study, category,
                                       sj_ext + '.zip')

            if not os.path.exists(pfi_input_sj_ext_zip):
                raise IOError(
                    'Declared external study for subject {} in folder {} not found'
                    .format(sj_ext, pfi_input_sj_ext_zip))
            pfo_output_sj_ext = jph(root_study_rabbits,
                                    '01_raw_data_unzipped_TMP', study,
                                    category)

            unzipper_given_pfi_input_and_pfo_output(pfi_input_sj_ext_zip,
                                                    pfo_output_sj_ext, sj_ext,
                                                    controller)

            print('\n\n')
def open_all_same_mod_in_block(sj_name_list,
                               mod='T1',
                               pfo_folder_data=path_manager.pfo_multi_atlas):
    pfi_mod_1 = jph(pfo_folder_data, sj_name_list[0], 'mod',
                    '{0}_{1}.nii.gz'.format(sj_name_list[0], mod))
    cmd = 'itksnap -g {0} '.format(pfi_mod_1)
    if len(sj_name_list) > 1:
        cmd += ' -o '
        for sj in sj_name_list[1:]:
            cmd += ' {} '.format(
                jph(pfo_folder_data, sj, 'mod',
                    '{0}_{1}.nii.gz'.format(sj, mod)))
    print(cmd)
    os.system(cmd)
Example #20
def unzipper_given_pfi_input_and_pfo_output(pfi_in, pfo_out, sj_name,
                                            controller):
    """
    Unzipper auxiliary function related to subject name and controller, with paths established a priori.
    Externalised to avoid code repetition, as it is called twice.
    :param pfi_in: path to the input .zip file.
    :param pfo_out: path to the output folder where to unzip.
    :param sj_name: usual sj parameter.
    :param controller: controller filtered by the previous methods.
    :return:
    """
    # Create folder structure:
    if controller['create_tmp_folder_structure']:
        print_and_run('mkdir -p {}'.format(pfo_out))

    # Unzip:
    if controller['unzip']:
        cmd = 'tar -xvf {} -C {}'.format(pfi_in, pfo_out)
        print(cmd)
        print_and_run(cmd)

    # Rename:
    if controller['rename']:
        file_found = 0
        for p in os.listdir(pfo_out):

            if '_HVDM_{}_'.format(sj_name) in p or '_{}_'.format(
                    sj_name) in p or '_{}_{}_'.format(sj_name[:3],
                                                      sj_name[3:]) in p:
                file_found += 1
                pfi_unzipped_old_name = jph(pfo_out, p)
                pfi_unzipped_new_name = jph(pfo_out, sj_name)
                cmd = 'mv {} {}'.format(pfi_unzipped_old_name,
                                        pfi_unzipped_new_name)
                print_and_run(cmd)
            elif p == str(sj_name):
                # file is already in the correct format
                file_found += 1

        if file_found != 1:
            raise IOError(
                'Unzipped file was saved with a different naming convention. We found {} files with no '
                'string {} in them. Manual work required: check under folder {}. (Probably two subjects '
                'with the same name, or a different convention to save filenames?)'.format(
                    file_found, '_{}_'.format(sj_name), pfo_out))

        pfi_ds_store_mac = jph(pfo_out, sj_name, '.DS_Store')
        if os.path.exists(pfi_ds_store_mac):
            print_and_run('rm {}'.format(pfi_ds_store_mac))
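
The unzip step above shells out to tar (which reads .zip archives on systems shipping bsdtar). A pure-Python sketch of the same step with the standard zipfile module:

import zipfile

def unzip_with_zipfile(pfi_in, pfo_out):
    # Extract the .zip archive into pfo_out, mirroring 'tar -xvf {} -C {}'.
    with zipfile.ZipFile(pfi_in, 'r') as zf:
        zf.extractall(pfo_out)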
Example #21
def see_array(in_array,
              pfo_tmp='./z_tmp',
              in_array_segm=None,
              pfi_label_descriptor=None,
              block=False):
    """
    Itk-snap based quick array visualiser.
    :param in_array: numpy array, or list of numpy arrays of the same dimension (GIGO).
    :param pfo_tmp: path to a temporary folder.
    :param in_array_segm: optional single array representing a segmentation (in this case all the images must
    have the same shape).
    :param pfi_label_descriptor: path to a label descriptor in the ITK-Snap standard format.
    :param block: if True, stop after each show.
    :return:
    """
    if isinstance(in_array, list):
        assert len(in_array) > 0
        sh = in_array[0].shape
        for arr in in_array[1:]:
            assert sh == arr.shape
        print_and_run('mkdir {}'.format(pfo_tmp))
        cmd = 'itksnap -g '
        for arr_id, arr in enumerate(in_array):
            im = nib.Nifti1Image(arr, affine=np.eye(4))
            pfi_im = jph(pfo_tmp, 'im_{}.nii.gz'.format(arr_id))
            nib.save(im, pfi_im)
            if arr_id == 1:
                cmd += ' -o {} '.format(pfi_im)
            else:
                cmd += ' {} '.format(pfi_im)
    elif isinstance(in_array, np.ndarray):
        print_and_run('mkdir {}'.format(pfo_tmp))
        im = nib.Nifti1Image(in_array, affine=np.eye(4))
        pfi_im = jph(pfo_tmp, 'im_0.nii.gz')
        nib.save(im, pfi_im)
        cmd = 'itksnap -g {}'.format(pfi_im)
    else:
        raise IOError('in_array must be a numpy array or a list of numpy arrays.')
    if in_array_segm is not None:
        im_segm = nib.Nifti1Image(in_array_segm, affine=np.eye(4))
        pfi_im_segm = jph(pfo_tmp, 'im_segm_0.nii.gz')
        nib.save(im_segm, pfi_im_segm)
        cmd += ' -s {} '.format(pfi_im_segm)
        if pfi_label_descriptor:
            if os.path.exists(pfi_label_descriptor):
                cmd += ' -l {} '.format(pfi_label_descriptor)
    print_and_run(cmd)
    if block:
        _ = raw_input("Press any key to continue.")
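
A quick usage sketch for see_array with synthetic volumes (requires itksnap on the PATH):

# Two random volumes with the same shape, shown side by side in ITK-Snap;
# with block=True the call waits for a key press before returning.
vol_1 = np.random.uniform(size=(40, 40, 40))
vol_2 = np.random.uniform(size=(40, 40, 40))
see_array([vol_1, vol_2], pfo_tmp='./z_tmp', block=True)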
def test_save_in_fsl_convention_reload_as_dict_and_compare():
    ldm_itk = LabelsDescriptorManager(
        jph(pfo_tmp_test, 'labels_descriptor.txt'))
    # change convention
    ldm_itk.convention = 'fsl'
    ldm_itk.save_label_descriptor(
        jph(pfo_tmp_test, 'labels_descriptor_fsl.txt'))

    ldm_fsl = LabelsDescriptorManager(jph(pfo_tmp_test,
                                          'labels_descriptor_fsl.txt'),
                                      labels_descriptor_convention='fsl')

    # NOTE: the test works only with the default 1.0 values - the fsl convention is less informative than itk-snap's.
    for k in ldm_itk.dict_label_descriptor.keys():
        assert ldm_itk.dict_label_descriptor[k] == ldm_fsl.dict_label_descriptor[k]
def test_generate_dummy_labels_descriptor_wrong_input2():
    with pytest.raises(IOError):
        generate_dummy_label_descriptor(
            jph(pfo_tmp_test, 'labels_descriptor.txt'),
            list_labels=range(5),
            list_roi_names=['1', '2', '3', '4', '5'],
            list_colors_triplets=[[0, 0, 0], [1, 1, 1]])
    def wrap(*args, **kwargs):

        if not os.path.exists(pfo_icv_brains):
            os.system('mkdir -p {}'.format(pfo_icv_brains))
            os.system('mkdir -p {}'.format(pfo_icv_output))
            print(
                '\n\nICV Testing: \nGenerating dummy dataset for testing part 1. May take some minutes.'
            )
            for sj_id, [pfi_sj, pfi_segm
                        ] in enumerate(zip(list_pfi_sj, list_pfi_sj_segm)):
                print('\nSubject {}/{}...'.format(sj_id + 1, num_subjects))
                sj, segm = headlike_phantom((71, 71, 71),
                                            intensities=(0.9, 0.3, 0.6, 0.8),
                                            random_perturbation=.4)
                im_sj = nib.Nifti1Image(sj, affine=np.eye(4))
                im_segm = nib.Nifti1Image(segm, affine=np.eye(4))
                nib.save(im_sj, pfi_sj)
                nib.save(im_segm, pfi_segm)

        if not os.path.exists(jph(pfo_icv_output, 'warped')):
            print(
                '\n\nICV Testing: \nGenerate the transformations for the complete graph. May take again some minutes'
            )
            my_icv_estimator = IcvEstimator(list_pfi_sj, pfo_icv_output)
            my_icv_estimator.generate_transformations()

        test_func(*args, **kwargs)
def test_get_multi_label_dict_standard_combine():
    ldm_lr = LabelsDescriptorManager(
        jph(pfo_tmp_test, 'labels_descriptor_RL.txt'))

    multi_labels_dict_from_ldm = ldm_lr.get_multi_label_dict(
        combine_right_left=True)

    expected_multi_labels_dict = collections.OrderedDict()
    expected_multi_labels_dict.update({'background': [0]})
    expected_multi_labels_dict.update({'label A Left': [1]})
    expected_multi_labels_dict.update({'label A Right': [2]})
    expected_multi_labels_dict.update({'label A': [1, 2]})
    expected_multi_labels_dict.update({'label B Left': [3]})
    expected_multi_labels_dict.update({'label B Right': [4]})
    expected_multi_labels_dict.update({'label B': [3, 4]})
    expected_multi_labels_dict.update({'label C': [5]})
    expected_multi_labels_dict.update({'label D': [6]})
    expected_multi_labels_dict.update({'label E Left': [7]})
    expected_multi_labels_dict.update({'label E Right': [8]})
    expected_multi_labels_dict.update({'label E': [7, 8]})

    for k1, k2 in zip(multi_labels_dict_from_ldm.keys(),
                      expected_multi_labels_dict.keys()):
        assert k1 == k2
        assert multi_labels_dict_from_ldm[k1] == expected_multi_labels_dict[k2]
Example #26
    def wrap(*args, **kwargs):

        # 1) Before: create folder
        os.system('mkdir {}'.format(pfo_tmp_test))
        # 1bis) Then, generate dummy descriptor in the generated folder
        descriptor_dummy = \
            """################################################
# ITK-SnAP Label Description File
# File format:
# IDX   -R-  -G-  -B-  -A--  VIS MSH  LABEL
# Fields:
#    IDX:   Zero-based index
#    -R-:   Red color component (0..255)
#    -G-:   Green color component (0..255)
#    -B-:   Blue color component (0..255)
#    -A-:   Label transparency (0.00 .. 1.00)
#    VIS:   Label visibility (0 or 1)
#    IDX:   Label mesh visibility (0 or 1)
#  LABEL:   Label description
################################################
    0     0    0    0        0  0  0    "background"
    1   255    0    0        1  1  1    "label one (l1)"
    2   204    0    0        1  1  1    "label two (l2)"
    3    51   51  255        1  1  1    "label three"
    4   102  102  255        1  1  1    "label four"
    5     0  204   51        1  1  1    "label five (l5)"
    6    51  255  102        1  1  1    "label six"
    7   255  255    0        1  1  1    "label seven"
    8   255  50    50        1  1  1    "label eight" """
        with open(jph(pfo_tmp_test, 'labels_descriptor.txt'), 'w+') as f:
            f.write(descriptor_dummy)
        # 2) Run test
        test_func(*args, **kwargs)
        # 3) After: delete folder and its content
        os.system('rm -r {}'.format(pfo_tmp_test))
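
The wrap function above closes over test_func, i.e. it is the inner function of a test decorator. A sketch of the assumed enclosing pattern (the decorator name is hypothetical):

def with_dummy_labels_descriptor(test_func):  # hypothetical name
    def wrap(*args, **kwargs):
        # 1) create pfo_tmp_test and write the dummy descriptor (as above)
        # 2) run the test
        test_func(*args, **kwargs)
        # 3) delete pfo_tmp_test and its content (as above)
    return wrap

@with_dummy_labels_descriptor
def test_load_save_and_compare():
    pass  # body as in the test shown below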
Example #27
    def save_results_by_tag(self, parameters_tag='all'):
        """
        Gather all the approved results saved under the given tag in the target's scaffoldings folders, and copy
        them under the segmentations folder of the target.
        :param parameters_tag: if 'all' or None, all tags are considered.
        :return:
        """
        def copy_from_folder(pfo_input):
            assert os.path.exists(pfo_input), pfo_input
            # Auxiliary
            for pfi in os.listdir(pfo_input):
                if pfi.startswith(self.arch_approved_segmentation_prefix):
                    cmd = 'cp {0} {1}'.format(
                        jph(pfo_input, pfi),
                        jph(
                            self.target_pfo, self.target_name,
                            self.arch_segmentations_name_folder,
                            pfi.replace(self.arch_approved_segmentation_prefix,
                                        '')))
                    print(cmd)
                    os.system(cmd)

        if parameters_tag == 'all' or parameters_tag is None:
            for p in os.listdir(self.target_pfo):
                if p.startswith(self.arch_scaffoldings_name_folder):
                    copy_from_folder(jph(self.target_pfo, p))
        else:
            pfo_scaffoldings_tagged = jph(
                self.target_pfo,
                self.arch_scaffoldings_name_folder + '_' + parameters_tag)
            assert os.path.exists(pfo_scaffoldings_tagged), pfo_scaffoldings_tagged
            copy_from_folder(pfo_scaffoldings_tagged)
def test_load_save_and_compare():
    ldm = LabelsDescriptorManager(jph(pfo_tmp_test, 'labels_descriptor.txt'))
    ldm.save_label_descriptor(jph(pfo_tmp_test, 'labels_descriptor2.txt'))

    f1 = open(jph(pfo_tmp_test, 'labels_descriptor.txt'), 'r')
    f2 = open(jph(pfo_tmp_test, 'labels_descriptor2.txt'), 'r')

    for l1, l2 in zip(f1.readlines(), f2.readlines()):
        split_l1 = [
            float(a) if is_a_string_number(a) else a
            for a in [a.strip() for a in l1.split(' ') if a != '']
        ]
        split_l2 = [
            float(b) if is_a_string_number(b) else b
            for b in [b.strip() for b in l2.split(' ') if b != '']
        ]
        assert split_l1 == split_l2
def generate_phantom_dataset(path_dir):
    if not os.path.exists(path_dir):
        print(
            '\n\nGenerating dataset for testing: phantom multi-atlas and phantom target in {}. '
            'Will take some minutes.'.format(path_dir))
        os.system('mkdir {}'.format(path_dir))
        os.system('mkdir {}'.format(jph(path_dir, 'MultiAtlas')))
        os.system('mkdir {}'.format(jph(path_dir, 'Targets')))
        generate_multi_atlas(jph(path_dir, 'MultiAtlas'),
                             number_of_subjects=N,
                             multi_atlas_root_name=MULTI_ATLAS_NAME_PREFIX,
                             randomness_shape=RS,
                             randomness_noise=RN)
        generate_atlas(jph(path_dir, 'Targets'),
                       atlas_name='{}01'.format(TARGET_NAME_SUFFIX),
                       randomness_shape=RS,
                       randomness_noise=RN)
def test_save_multi_labels_descriptor_custom_test_robustness():

    # save this as a multi-labels descriptor file, then read it back and check that everything went in order
    d = collections.OrderedDict()
    d.update({0: [[0, 0, 0], [0, 0, 0], 'background']})
    d.update({1: [[255, 0, 0], [1, 1, 1], 'label A Right']})
    d.update({2: [[204, 0, 0], [1, 1, 1], 'label A Left']})
    d.update({3: [[51, 51, 255], [1, 1, 1], 'label B left']})
    d.update({4: [[102, 102, 255], [1, 1, 1], 'label B Right']})
    d.update({5: [[0, 204, 51], [1, 1, 1], 'label C ']})
    d.update({6: [[51, 255, 102], [1, 1, 1],
                  'label D Right']})  # unpaired label
    d.update({7: [[255, 255, 0], [1, 1, 1],
                  'label E right  ']})  # small r and spaces
    d.update({8: [[255, 50, 50], [1, 1, 1],
                  'label E Left  ']})  # ... paired with small l and spaces

    with open(jph(pfo_tmp_test, 'labels_descriptor_RL.txt'), 'w+') as f:
        for j in d.keys():
            line = '{0: >5}{1: >6}{2: >6}{3: >6}{4: >9}{5: >6}{6: >6}    "{7}"\n'.format(
                j, d[j][0][0], d[j][0][1], d[j][0][2], d[j][1][0], d[j][1][1],
                d[j][1][2], d[j][2])
            f.write(line)

    # load it with an instance of LabelsDescriptorManager
    ldm_lr = LabelsDescriptorManager(
        jph(pfo_tmp_test, 'labels_descriptor_RL.txt'))
    multi_labels_dict_from_ldm = ldm_lr.get_multi_label_dict(
        combine_right_left=True)

    expected_multi_labels_dict = collections.OrderedDict()
    expected_multi_labels_dict.update({'background': [0]})
    expected_multi_labels_dict.update({'label A Right': [1]})
    expected_multi_labels_dict.update({'label A Left': [2]})
    expected_multi_labels_dict.update({'label A': [1, 2]})
    expected_multi_labels_dict.update({'label B left': [3]})
    expected_multi_labels_dict.update({'label B Right': [4]})
    expected_multi_labels_dict.update({'label C': [5]})
    expected_multi_labels_dict.update({'label D Right': [6]})
    expected_multi_labels_dict.update({'label E right': [7]})
    expected_multi_labels_dict.update({'label E Left': [8]})

    for k1, k2 in zip(multi_labels_dict_from_ldm.keys(),
                      expected_multi_labels_dict.keys()):
        assert k1 == k2
        assert multi_labels_dict_from_ldm[k1] == expected_multi_labels_dict[k2]