Example no. 1
def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file):
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    pgpath = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name]["node_information_graphml"]
    reg_path = out_roi_file
    output_dir = op.abspath(op.curdir)

    iflogger.info("Cropping and moving datasets to %s" % output_dir)
    ds = [
        (op.join(fs_dir, "mri", "aseg.nii.gz"), op.join(output_dir, "aseg.nii.gz")),
        (op.join(fs_dir, "mri", "ribbon.nii.gz"), op.join(output_dir, "ribbon.nii.gz")),
        (op.join(fs_dir, "mri", "fsmask_1mm.nii.gz"), op.join(output_dir, "fsmask_1mm.nii.gz")),
        (op.join(fs_dir, "label", "cc_unknown.nii.gz"), op.join(output_dir, "cc_unknown.nii.gz")),
    ]

    ds.append((op.join(op.curdir, "ROI_%s.nii.gz" % parcellation_name), op.join(op.curdir, "ROI_HR_th.nii.gz")))
    orig = op.join(fs_dir, "mri", "orig", "001.mgz")
    for d in ds:
        iflogger.info("Processing %s:" % d[0])
        if not op.exists(d[0]):
            raise Exception("File %s does not exist." % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1])
        runCmd(mri_cmd, log)
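A minimal usage sketch for this variant, assuming the module-level names it relies on (`op`, `cmp`, `iflogger`, `runCmd`) are available as in the original interface; the subject id, directories, and scale below are placeholders, and the FreeSurfer outputs for the subject must already exist.

import os.path as op

subjects_dir = "/data/freesurfer_subjects"      # placeholder SUBJECTS_DIR
subject_id = "sub-01"                           # placeholder subject
parcellation_name = "scale33"                   # placeholder Lausanne2008 scale

# Hypothetical call; ROI_<scale>.nii.gz is expected in the current directory
# (produced by the create_roi step shown in the later examples).
crop_and_move_datasets(
    subject_id,
    subjects_dir,
    op.join(subjects_dir, subject_id),          # fs_dir (recomputed inside anyway)
    parcellation_name,
    op.abspath("ROI_%s.nii.gz" % parcellation_name),  # out_roi_file
)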
Example no. 2
def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, out_roi_file, dilation):
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    output_dir = op.abspath(op.curdir)

    iflogger.info('Cropping and moving datasets to %s', output_dir)
    ds = [
        (op.join(fs_dir, 'mri', 'aseg.nii.gz'),
         op.abspath('aseg.nii.gz')),
        (op.join(fs_dir, 'mri', 'ribbon.nii.gz'),
         op.abspath('ribbon.nii.gz')),
        (op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz'),
         op.abspath('fsmask_1mm.nii.gz')),
        (op.join(fs_dir, 'label', 'cc_unknown.nii.gz'),
         op.abspath('cc_unknown.nii.gz'))
    ]

    ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name),
               op.abspath('ROI_HR_th.nii.gz')))
    if dilation is True:
        ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name),
                   op.abspath('ROIv_HR_th.nii.gz')))
    orig = op.join(fs_dir, 'mri', 'orig', '001.mgz')
    for d in ds:
        iflogger.info('Processing %s:', d[0])
        if not op.exists(d[0]):
            raise Exception('File %s does not exist.' % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (
            orig, d[0], d[1])
        runCmd(mri_cmd, log)
Example no. 3
def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name,
                           out_roi_file, dilation):
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    output_dir = op.abspath(op.curdir)

    iflogger.info('Cropping and moving datasets to %s', output_dir)
    ds = [(op.join(fs_dir, 'mri', 'aseg.nii.gz'), op.abspath('aseg.nii.gz')),
          (op.join(fs_dir, 'mri',
                   'ribbon.nii.gz'), op.abspath('ribbon.nii.gz')),
          (op.join(fs_dir, 'mri',
                   'fsmask_1mm.nii.gz'), op.abspath('fsmask_1mm.nii.gz')),
          (op.join(fs_dir, 'label',
                   'cc_unknown.nii.gz'), op.abspath('cc_unknown.nii.gz'))]

    ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name),
               op.abspath('ROI_HR_th.nii.gz')))
    if dilation is True:
        ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name),
                   op.abspath('ROIv_HR_th.nii.gz')))
    orig = op.join(fs_dir, 'mri', 'orig', '001.mgz')
    for d in ds:
        iflogger.info('Processing %s:', d[0])
        if not op.exists(d[0]):
            raise Exception('File %s does not exist.' % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (
            orig, d[0], d[1])
        runCmd(mri_cmd, log)
Example no. 4
def crop_and_move_WM_and_GM(subjects_dir, subject_id, wm_mask_filename, roi_filename):
    """Taken from cmtk and adapted for running as isolated function in
    this interface.

    http://github.com/tnez/cmp/blob/master/cmp/stages/parcellation/maskcreation.py

    """
    import os.path as op
    import nibabel as ni
    import numpy as np

    fs_dir = op.join(subjects_dir,subject_id)
    output_dir = op.abspath(op.curdir)

    # datasets to crop and move: (from, to)
    ds = [
        (op.join(fs_dir, 'mri', 'fsmask_1mm.nii.gz'), op.join(output_dir, 'fsmask_1mm.nii.gz')),
    ]

    # NOTE: `p` (the parcellation name) and `log` (the logger) are assumed to be
    # provided by the enclosing scope; they are not defined in this snippet.
    ds.append((op.join(fs_dir, 'mri', 'ROIv_%s.nii.gz' % p), op.join(output_dir, 'ROIv_HR_th_%s.nii.gz' % p)))

    orig = op.join(fs_dir, 'mri', 'orig', '001.mgz')

    for d in ds:
        log.info("Processing %s:" % d[0])

        # does it exist at all?
        if not op.exists(d[0]):
            raise Exception('File %s does not exist.' % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1])
        runCmd( mri_cmd,log )
Example no. 5
def crop_and_move_WM_and_GM(subjects_dir, subject_id, wm_mask_filename, roi_filename):
    """Taken from cmtk and adapted for running as isolated function in
    this interface.

    http://github.com/tnez/cmp/blob/master/cmp/stages/parcellation/maskcreation.py

    """
    import os.path as op
    import nibabel as ni
    import numpy as np

    fs_dir = op.join(subjects_dir, subject_id)
    output_dir = op.abspath(op.curdir)

    # datasets to crop and move: (from, to)
    ds = [(op.join(fs_dir, "mri", "fsmask_1mm.nii.gz"), op.join(output_dir, "fsmask_1mm.nii.gz"))]

    # NOTE: `p` (the parcellation name) and `log` (the logger) are assumed to be
    # provided by the enclosing scope; they are not defined in this snippet.
    ds.append((op.join(fs_dir, "mri", "ROIv_%s.nii.gz" % p), op.join(output_dir, "ROIv_HR_th_%s.nii.gz" % p)))

    orig = op.join(fs_dir, "mri", "orig", "001.mgz")

    for d in ds:
        log.info("Processing %s:" % d[0])

        # does it exist at all?
        if not op.exists(d[0]):
            raise Exception("File %s does not exist." % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1])
        runCmd(mri_cmd, log)
Example no. 6
def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name,
                           out_roi_file, dilation):
    from cmp.util import runCmd

    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    output_dir = op.abspath(op.curdir)

    iflogger.info("Cropping and moving datasets to %s", output_dir)
    ds = [
        (op.join(fs_dir, "mri", "aseg.nii.gz"), op.abspath("aseg.nii.gz")),
        (op.join(fs_dir, "mri", "ribbon.nii.gz"), op.abspath("ribbon.nii.gz")),
        (op.join(fs_dir, "mri",
                 "fsmask_1mm.nii.gz"), op.abspath("fsmask_1mm.nii.gz")),
        (
            op.join(fs_dir, "label", "cc_unknown.nii.gz"),
            op.abspath("cc_unknown.nii.gz"),
        ),
    ]

    ds.append((
        op.abspath("ROI_%s.nii.gz" % parcellation_name),
        op.abspath("ROI_HR_th.nii.gz"),
    ))
    if dilation is True:
        ds.append((
            op.abspath("ROIv_%s.nii.gz" % parcellation_name),
            op.abspath("ROIv_HR_th.nii.gz"),
        ))
    orig = op.join(fs_dir, "mri", "orig", "001.mgz")
    for d in ds:
        iflogger.info("Processing %s:", d[0])
        if not op.exists(d[0]):
            raise Exception("File %s does not exist." % d[0])
        # reslice to original volume because the roi creation with freesurfer
        # changed to 256x256x256 resolution
        mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (
            orig, d[0], d[1])
        runCmd(mri_cmd, log)
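This variant imports `runCmd` from `cmp.util` to execute the FreeSurfer command lines. If cmp is not available, a minimal stand-in along the following lines could be used instead; this is only a sketch of the assumed behaviour (run the command string, log it, fail on a non-zero exit), not the actual cmp implementation.

import shlex
import subprocess

def run_cmd_sketch(cmd, log=None):
    """Run a shell command string and optionally log its output.

    Hypothetical replacement for cmp.util.runCmd; the real helper may differ.
    """
    if log is not None:
        log.info("Running: %s", cmd)
    proc = subprocess.run(shlex.split(cmd), capture_output=True, text=True)
    if log is not None and proc.stdout:
        log.info(proc.stdout)
    if proc.returncode != 0:
        raise RuntimeError("Command failed (exit %d): %s\n%s"
                           % (proc.returncode, cmd, proc.stderr))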
Example no. 7
def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name):
    iflogger.info("Create the cortical labels necessary for our ROIs")
    iflogger.info("=================================================")
    fs_label_dir = op.join(op.join(subjects_dir, subject_id), 'label')
    output_dir = op.abspath(op.curdir)
    paths = []
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    for hemi in ['lh', 'rh']:
        spath = cmp_config._get_lausanne_parcellation(
            'Lausanne2008')[parcellation_name]['fs_label_subdir_name'] % hemi
        paths.append(spath)
    for p in paths:
        try:
            os.makedirs(op.join('.', p))
        except:
            pass
    if '33' in parcellation_name:
        comp = [
            ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot',
             'regenerated_rh_36', 'myaparc_36'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot',
             'regenerated_lh_36', 'myaparc_36'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '60' in parcellation_name:
        comp = [
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '125' in parcellation_name:
        comp = [
            ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot',
             'regenerated_rh_125', 'myaparc_125'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot',
             'regenerated_lh_125', 'myaparc_125'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '250' in parcellation_name:
        comp = [
            ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot',
             'regenerated_rh_250', 'myaparc_250'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot',
             'regenerated_lh_250', 'myaparc_250'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    else:
        comp = [
            ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot',
             'regenerated_rh_36', 'myaparc_36'),
            ('rh', 'myatlasP1_16_rh.gcs', 'rh.myaparcP1_16.annot',
             'regenerated_rh_500', 'myaparcP1_16'),
            ('rh', 'myatlasP17_28_rh.gcs', 'rh.myaparcP17_28.annot',
             'regenerated_rh_500', 'myaparcP17_28'),
            ('rh', 'myatlasP29_36_rh.gcs', 'rh.myaparcP29_36.annot',
             'regenerated_rh_500', 'myaparcP29_36'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot',
             'regenerated_rh_125', 'myaparc_125'),
            ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot',
             'regenerated_rh_250', 'myaparc_250'),
            ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot',
             'regenerated_lh_36', 'myaparc_36'),
            ('lh', 'myatlasP1_16_lh.gcs', 'lh.myaparcP1_16.annot',
             'regenerated_lh_500', 'myaparcP1_16'),
            ('lh', 'myatlasP17_28_lh.gcs', 'lh.myaparcP17_28.annot',
             'regenerated_lh_500', 'myaparcP17_28'),
            ('lh', 'myatlasP29_36_lh.gcs', 'lh.myaparcP29_36.annot',
             'regenerated_lh_500', 'myaparcP29_36'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
            ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot',
             'regenerated_lh_125', 'myaparc_125'),
            ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot',
             'regenerated_lh_250', 'myaparc_250'),
        ]

    log = cmp_config.get_logger()

    for out in comp:
        mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % (subject_id, out[0],
                                                                               op.join(subjects_dir, subject_id), out[0], cmp_config.get_lausanne_atlas(out[1]), op.join(fs_label_dir, out[2]))
        runCmd(mris_cmd, log)
        iflogger.info('-----------')

        annot = '--annotation "%s"' % out[4]

        mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % (subject_id, out[0], op.join(output_dir, out[3]), annot)
        iflogger.info(mri_an_cmd)
        runCmd(mri_an_cmd, log)
        iflogger.info('-----------')
        iflogger.info(os.environ['SUBJECTS_DIR'])
        # extract cc and unknown to add to tractography mask, we do not want this as a region of interest
        # in FS 5.0, unknown and corpuscallosum are not available for the 35 scale (why?),
        # but for the other scales only, take the ones from _60
        rhun = op.join(output_dir, 'rh.unknown.label')
        lhun = op.join(output_dir, 'lh.unknown.label')
        rhco = op.join(output_dir, 'rh.corpuscallosum.label')
        lhco = op.join(output_dir, 'lh.corpuscallosum.label')
    shutil.copy(
        op.join(output_dir, 'regenerated_rh_60', 'rh.unknown.label'), rhun)
    shutil.copy(
        op.join(output_dir, 'regenerated_lh_60', 'lh.unknown.label'), lhun)
    shutil.copy(op.join(
        output_dir, 'regenerated_rh_60', 'rh.corpuscallosum.label'), rhco)
    shutil.copy(op.join(
        output_dir, 'regenerated_lh_60', 'lh.corpuscallosum.label'), lhco)

    mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o  "%s" --identity """ % (rhun, lhun, rhco, lhco, op.join(op.join(subjects_dir, subject_id), 'mri', 'orig.mgz'), op.join(fs_label_dir, 'cc_unknown.nii.gz'))
    runCmd(mri_cmd, log)
    runCmd('mris_volmask %s' % subject_id, log)
    mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % (op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id))
    runCmd(mri_cmd, log)
    mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % (
        op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id))
    runCmd(mri_cmd, log)

    iflogger.info("[ DONE ]")
Example no. 8
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume. """
    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation(
        'Lausanne2008')[parcellation_name]
    pgpath = parval['node_information_graphml']
    aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
    asegd = aseg.get_data()

    # identify cortical voxels, right (3) and left (42) hemispheres
    idxr = np.where(asegd == 3)
    idxl = np.where(asegd == 42)
    xx = np.concatenate((idxr[0], idxl[0]))
    yy = np.concatenate((idxr[1], idxl[1]))
    zz = np.concatenate((idxr[2], idxl[2]))

    # initialize variables necessary for cortical ROIs dilation
    # dimensions of the neighbourhood for rois labels assignment (choose odd dimensions!)
    shape = (25, 25, 25)
    center = np.array(shape) // 2
    # dist: distances from the center of the neighbourhood
    dist = np.zeros(shape, dtype='float32')
    for x in range(shape[0]):
        for y in range(shape[1]):
            for z in range(shape[2]):
                distxyz = center - [x, y, z]
                dist[x, y, z] = np.sqrt(np.sum(np.multiply(distxyz, distxyz)))

    iflogger.info("Working on parcellation: ")
    iflogger.info(cmp_config._get_lausanne_parcellation(
        'Lausanne2008')[parcellation_name])
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        if brv['dn_hemisphere'] == 'left':
            hemi = 'lh'
        elif brv['dn_hemisphere'] == 'right':
            hemi = 'rh'
        if brv['dn_region'] == 'subcortical':
            iflogger.info(brv)
            iflogger.info('---------------------')
            iflogger.info('Work on brain region: %s', brv['dn_region'])
            iflogger.info('Freesurfer Name: %s', brv['dn_fsname'])
            iflogger.info('Region %s of %s', count, pg.number_of_nodes())
            iflogger.info('---------------------')
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv['dn_fs_aseg_val']))
            rois[idx] = int(brv['dn_correspondence_id'])

        elif brv['dn_region'] == 'cortical':
            iflogger.info(brv)
            iflogger.info('---------------------')
            iflogger.info('Work on brain region: %s', brv['dn_region'])
            iflogger.info('Freesurfer Name: %s', brv['dn_fsname'])
            iflogger.info('Region %s of %s', count, pg.number_of_nodes())
            iflogger.info('---------------------')

            labelpath = op.join(
                output_dir, parval['fs_label_subdir_name'] % hemi)
            # construct .label file name

            fname = '%s.%s.label' % (hemi, brv['dn_fsname'])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region

            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (op.join(labelpath, fname),
                                                                                      op.join(fs_dir, 'mri', 'orig.mgz'), op.join(output_dir, 'tmp.nii.gz'))
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, 'tmp.nii.gz'))
            tmpd = tmp.get_data()

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv['dn_correspondence_id'])

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.abspath('ROI_%s.nii.gz' % parcellation_name)

        # update the header
        hdr = aseg.header
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)

        log.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
    # dilate cortical regions
    if dilation is True:
        iflogger.info("Dilating cortical regions...")
        # loop throughout all the voxels belonging to the aseg GM volume
        for j in range(xx.size):
            if rois[xx[j], yy[j], zz[j]] == 0:
                local = extract(
                    rois, shape, position=(xx[j], yy[j], zz[j]), fill=0)
                mask = local.copy()
                mask[np.nonzero(local > 0)] = 1
                thisdist = np.multiply(dist, mask)
                thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist)
                value = np.int_(
                    local[np.nonzero(thisdist == np.amin(thisdist))])
                if value.size > 1:
                    counts = np.bincount(value)
                    value = np.argmax(counts)
                rois[xx[j], yy[j], zz[j]] = value

        # store volume eg in ROIv_scale33.nii.gz
        out_roi = op.abspath('ROIv_%s.nii.gz' % parcellation_name)
        iflogger.info('Save output image to %s', out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

        iflogger.info("[ DONE ]")
Example no. 9
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume. """
    import cmp
    from cmp.util import runCmd

    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation(
        "Lausanne2008")[parcellation_name]
    pgpath = parval["node_information_graphml"]
    aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz"))
    asegd = np.asanyarray(aseg.dataobj)

    # identify cortical voxels, right (3) and left (42) hemispheres
    idxr = np.where(asegd == 3)
    idxl = np.where(asegd == 42)
    xx = np.concatenate((idxr[0], idxl[0]))
    yy = np.concatenate((idxr[1], idxl[1]))
    zz = np.concatenate((idxr[2], idxl[2]))

    # initialize variables necessary for cortical ROIs dilation
    # dimensions of the neighbourhood for rois labels assignment (choose odd dimensions!)
    shape = (25, 25, 25)
    center = np.array(shape) // 2
    # dist: distances from the center of the neighbourhood
    dist = np.zeros(shape, dtype="float32")
    for x in range(shape[0]):
        for y in range(shape[1]):
            for z in range(shape[2]):
                distxyz = center - [x, y, z]
                dist[x, y, z] = np.sqrt(np.sum(np.multiply(distxyz, distxyz)))

    iflogger.info("Working on parcellation: ")
    iflogger.info(
        cmp_config._get_lausanne_parcellation("Lausanne2008")
        [parcellation_name])
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        if brv["dn_hemisphere"] == "left":
            hemi = "lh"
        elif brv["dn_hemisphere"] == "right":
            hemi = "rh"
        if brv["dn_region"] == "subcortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s", brv["dn_region"])
            iflogger.info("Freesurfer Name: %s", brv["dn_fsname"])
            iflogger.info("Region %s of %s", count, pg.number_of_nodes())
            iflogger.info("---------------------")
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv["dn_fs_aseg_val"]))
            rois[idx] = int(brv["dn_correspondence_id"])

        elif brv["dn_region"] == "cortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s", brv["dn_region"])
            iflogger.info("Freesurfer Name: %s", brv["dn_fsname"])
            iflogger.info("Region %s of %s", count, pg.number_of_nodes())
            iflogger.info("---------------------")

            labelpath = op.join(output_dir,
                                parval["fs_label_subdir_name"] % hemi)
            # construct .label file name

            fname = "%s.%s.label" % (hemi, brv["dn_fsname"])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region

            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (
                op.join(labelpath, fname),
                op.join(fs_dir, "mri", "orig.mgz"),
                op.join(output_dir, "tmp.nii.gz"),
            )
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, "tmp.nii.gz"))
            tmpd = np.asanyarray(tmp.dataobj)

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv["dn_correspondence_id"])

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.abspath("ROI_%s.nii.gz" % parcellation_name)

        # update the header
        hdr = aseg.header
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)

        log.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
    # dilate cortical regions
    if dilation is True:
        iflogger.info("Dilating cortical regions...")
        # loop throughout all the voxels belonging to the aseg GM volume
        for j in range(xx.size):
            if rois[xx[j], yy[j], zz[j]] == 0:
                local = extract(rois,
                                shape,
                                position=(xx[j], yy[j], zz[j]),
                                fill=0)
                mask = local.copy()
                mask[np.nonzero(local > 0)] = 1
                thisdist = np.multiply(dist, mask)
                thisdist[np.nonzero(thisdist == 0)] = np.amax(thisdist)
                value = np.int_(
                    local[np.nonzero(thisdist == np.amin(thisdist))])
                if value.size > 1:
                    counts = np.bincount(value)
                    value = np.argmax(counts)
                rois[xx[j], yy[j], zz[j]] = value

        # store volume eg in ROIv_scale33.nii.gz
        out_roi = op.abspath("ROIv_%s.nii.gz" % parcellation_name)
        iflogger.info("Save output image to %s", out_roi)
        img = nb.Nifti1Image(rois, aseg.affine, hdr2)
        nb.save(img, out_roi)

        iflogger.info("[ DONE ]")
Example no. 10
def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name):
    import cmp
    from cmp.util import runCmd

    iflogger.info("Create the cortical labels necessary for our ROIs")
    iflogger.info("=================================================")
    fs_label_dir = op.join(op.join(subjects_dir, subject_id), "label")
    output_dir = op.abspath(op.curdir)
    paths = []
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    for hemi in ["lh", "rh"]:
        spath = (cmp_config._get_lausanne_parcellation("Lausanne2008")
                 [parcellation_name]["fs_label_subdir_name"] % hemi)
        paths.append(spath)
    for p in paths:
        try:
            os.makedirs(op.join(".", p))
        except:
            pass
    if "33" in parcellation_name:
        comp = [
            (
                "rh",
                "myatlas_36_rh.gcs",
                "rh.myaparc_36.annot",
                "regenerated_rh_36",
                "myaparc_36",
            ),
            (
                "rh",
                "myatlas_60_rh.gcs",
                "rh.myaparc_60.annot",
                "regenerated_rh_60",
                "myaparc_60",
            ),
            (
                "lh",
                "myatlas_36_lh.gcs",
                "lh.myaparc_36.annot",
                "regenerated_lh_36",
                "myaparc_36",
            ),
            (
                "lh",
                "myatlas_60_lh.gcs",
                "lh.myaparc_60.annot",
                "regenerated_lh_60",
                "myaparc_60",
            ),
        ]
    elif "60" in parcellation_name:
        comp = [
            (
                "rh",
                "myatlas_60_rh.gcs",
                "rh.myaparc_60.annot",
                "regenerated_rh_60",
                "myaparc_60",
            ),
            (
                "lh",
                "myatlas_60_lh.gcs",
                "lh.myaparc_60.annot",
                "regenerated_lh_60",
                "myaparc_60",
            ),
        ]
    elif "125" in parcellation_name:
        comp = [
            (
                "rh",
                "myatlas_125_rh.gcs",
                "rh.myaparc_125.annot",
                "regenerated_rh_125",
                "myaparc_125",
            ),
            (
                "rh",
                "myatlas_60_rh.gcs",
                "rh.myaparc_60.annot",
                "regenerated_rh_60",
                "myaparc_60",
            ),
            (
                "lh",
                "myatlas_125_lh.gcs",
                "lh.myaparc_125.annot",
                "regenerated_lh_125",
                "myaparc_125",
            ),
            (
                "lh",
                "myatlas_60_lh.gcs",
                "lh.myaparc_60.annot",
                "regenerated_lh_60",
                "myaparc_60",
            ),
        ]
    elif "250" in parcellation_name:
        comp = [
            (
                "rh",
                "myatlas_250_rh.gcs",
                "rh.myaparc_250.annot",
                "regenerated_rh_250",
                "myaparc_250",
            ),
            (
                "rh",
                "myatlas_60_rh.gcs",
                "rh.myaparc_60.annot",
                "regenerated_rh_60",
                "myaparc_60",
            ),
            (
                "lh",
                "myatlas_250_lh.gcs",
                "lh.myaparc_250.annot",
                "regenerated_lh_250",
                "myaparc_250",
            ),
            (
                "lh",
                "myatlas_60_lh.gcs",
                "lh.myaparc_60.annot",
                "regenerated_lh_60",
                "myaparc_60",
            ),
        ]
    else:
        comp = [
            (
                "rh",
                "myatlas_36_rh.gcs",
                "rh.myaparc_36.annot",
                "regenerated_rh_36",
                "myaparc_36",
            ),
            (
                "rh",
                "myatlasP1_16_rh.gcs",
                "rh.myaparcP1_16.annot",
                "regenerated_rh_500",
                "myaparcP1_16",
            ),
            (
                "rh",
                "myatlasP17_28_rh.gcs",
                "rh.myaparcP17_28.annot",
                "regenerated_rh_500",
                "myaparcP17_28",
            ),
            (
                "rh",
                "myatlasP29_36_rh.gcs",
                "rh.myaparcP29_36.annot",
                "regenerated_rh_500",
                "myaparcP29_36",
            ),
            (
                "rh",
                "myatlas_60_rh.gcs",
                "rh.myaparc_60.annot",
                "regenerated_rh_60",
                "myaparc_60",
            ),
            (
                "rh",
                "myatlas_125_rh.gcs",
                "rh.myaparc_125.annot",
                "regenerated_rh_125",
                "myaparc_125",
            ),
            (
                "rh",
                "myatlas_250_rh.gcs",
                "rh.myaparc_250.annot",
                "regenerated_rh_250",
                "myaparc_250",
            ),
            (
                "lh",
                "myatlas_36_lh.gcs",
                "lh.myaparc_36.annot",
                "regenerated_lh_36",
                "myaparc_36",
            ),
            (
                "lh",
                "myatlasP1_16_lh.gcs",
                "lh.myaparcP1_16.annot",
                "regenerated_lh_500",
                "myaparcP1_16",
            ),
            (
                "lh",
                "myatlasP17_28_lh.gcs",
                "lh.myaparcP17_28.annot",
                "regenerated_lh_500",
                "myaparcP17_28",
            ),
            (
                "lh",
                "myatlasP29_36_lh.gcs",
                "lh.myaparcP29_36.annot",
                "regenerated_lh_500",
                "myaparcP29_36",
            ),
            (
                "lh",
                "myatlas_60_lh.gcs",
                "lh.myaparc_60.annot",
                "regenerated_lh_60",
                "myaparc_60",
            ),
            (
                "lh",
                "myatlas_125_lh.gcs",
                "lh.myaparc_125.annot",
                "regenerated_lh_125",
                "myaparc_125",
            ),
            (
                "lh",
                "myatlas_250_lh.gcs",
                "lh.myaparc_250.annot",
                "regenerated_lh_250",
                "myaparc_250",
            ),
        ]

    log = cmp_config.get_logger()

    for out in comp:
        mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % (
            subject_id,
            out[0],
            op.join(subjects_dir, subject_id),
            out[0],
            cmp_config.get_lausanne_atlas(out[1]),
            op.join(fs_label_dir, out[2]),
        )
        runCmd(mris_cmd, log)
        iflogger.info("-----------")

        annot = '--annotation "%s"' % out[4]

        mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % (
            subject_id,
            out[0],
            op.join(output_dir, out[3]),
            annot,
        )
        iflogger.info(mri_an_cmd)
        runCmd(mri_an_cmd, log)
        iflogger.info("-----------")
        iflogger.info(os.environ["SUBJECTS_DIR"])
        # extract cc and unknown to add to tractography mask, we do not want this as a region of interest
        # in FS 5.0, unknown and corpuscallosum are not available for the 35 scale (why?),
        # but for the other scales only, take the ones from _60
        rhun = op.join(output_dir, "rh.unknown.label")
        lhun = op.join(output_dir, "lh.unknown.label")
        rhco = op.join(output_dir, "rh.corpuscallosum.label")
        lhco = op.join(output_dir, "lh.corpuscallosum.label")
    shutil.copy(op.join(output_dir, "regenerated_rh_60", "rh.unknown.label"),
                rhun)
    shutil.copy(op.join(output_dir, "regenerated_lh_60", "lh.unknown.label"),
                lhun)
    shutil.copy(
        op.join(output_dir, "regenerated_rh_60", "rh.corpuscallosum.label"),
        rhco)
    shutil.copy(
        op.join(output_dir, "regenerated_lh_60", "lh.corpuscallosum.label"),
        lhco)

    mri_cmd = (
        """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o  "%s" --identity """
        % (
            rhun,
            lhun,
            rhco,
            lhco,
            op.join(op.join(subjects_dir, subject_id), "mri", "orig.mgz"),
            op.join(fs_label_dir, "cc_unknown.nii.gz"),
        ))
    runCmd(mri_cmd, log)
    runCmd("mris_volmask %s" % subject_id, log)
    mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % (
        op.join(subjects_dir, subject_id),
        op.join(subjects_dir, subject_id),
    )
    runCmd(mri_cmd, log)
    mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % (
        op.join(subjects_dir, subject_id),
        op.join(subjects_dir, subject_id),
    )
    runCmd(mri_cmd, log)

    iflogger.info("[ DONE ]")
Example no. 11
def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name):
    iflogger.info("Create the cortical labels necessary for our ROIs")
    iflogger.info("=================================================")
    fs_label_dir = op.join(op.join(subjects_dir, subject_id), 'label')
    output_dir = op.abspath(op.curdir)
    paths = []
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    for hemi in ['lh', 'rh']:
        spath = cmp_config._get_lausanne_parcellation(
            'Lausanne2008')[parcellation_name]['fs_label_subdir_name'] % hemi
        paths.append(spath)
    for p in paths:
        try:
            os.makedirs(op.join('.', p))
        except:
            pass
    if '33' in parcellation_name:
        comp = [
            ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot',
             'regenerated_rh_36', 'myaparc_36'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot',
             'regenerated_lh_36', 'myaparc_36'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '60' in parcellation_name:
        comp = [
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '125' in parcellation_name:
        comp = [
            ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot',
             'regenerated_rh_125', 'myaparc_125'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot',
             'regenerated_lh_125', 'myaparc_125'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    elif '250' in parcellation_name:
        comp = [
            ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot',
             'regenerated_rh_250', 'myaparc_250'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot',
             'regenerated_lh_250', 'myaparc_250'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
        ]
    else:
        comp = [
            ('rh', 'myatlas_36_rh.gcs', 'rh.myaparc_36.annot',
             'regenerated_rh_36', 'myaparc_36'),
            ('rh', 'myatlasP1_16_rh.gcs', 'rh.myaparcP1_16.annot',
             'regenerated_rh_500', 'myaparcP1_16'),
            ('rh', 'myatlasP17_28_rh.gcs', 'rh.myaparcP17_28.annot',
             'regenerated_rh_500', 'myaparcP17_28'),
            ('rh', 'myatlasP29_36_rh.gcs', 'rh.myaparcP29_36.annot',
             'regenerated_rh_500', 'myaparcP29_36'),
            ('rh', 'myatlas_60_rh.gcs', 'rh.myaparc_60.annot',
             'regenerated_rh_60', 'myaparc_60'),
            ('rh', 'myatlas_125_rh.gcs', 'rh.myaparc_125.annot',
             'regenerated_rh_125', 'myaparc_125'),
            ('rh', 'myatlas_250_rh.gcs', 'rh.myaparc_250.annot',
             'regenerated_rh_250', 'myaparc_250'),
            ('lh', 'myatlas_36_lh.gcs', 'lh.myaparc_36.annot',
             'regenerated_lh_36', 'myaparc_36'),
            ('lh', 'myatlasP1_16_lh.gcs', 'lh.myaparcP1_16.annot',
             'regenerated_lh_500', 'myaparcP1_16'),
            ('lh', 'myatlasP17_28_lh.gcs', 'lh.myaparcP17_28.annot',
             'regenerated_lh_500', 'myaparcP17_28'),
            ('lh', 'myatlasP29_36_lh.gcs', 'lh.myaparcP29_36.annot',
             'regenerated_lh_500', 'myaparcP29_36'),
            ('lh', 'myatlas_60_lh.gcs', 'lh.myaparc_60.annot',
             'regenerated_lh_60', 'myaparc_60'),
            ('lh', 'myatlas_125_lh.gcs', 'lh.myaparc_125.annot',
             'regenerated_lh_125', 'myaparc_125'),
            ('lh', 'myatlas_250_lh.gcs', 'lh.myaparc_250.annot',
             'regenerated_lh_250', 'myaparc_250'),
        ]

    log = cmp_config.get_logger()

    for out in comp:
        mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % (
            subject_id, out[0], op.join(subjects_dir, subject_id), out[0],
            cmp_config.get_lausanne_atlas(out[1]), op.join(
                fs_label_dir, out[2]))
        runCmd(mris_cmd, log)
        iflogger.info('-----------')

        annot = '--annotation "%s"' % out[4]

        mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % (
            subject_id, out[0], op.join(output_dir, out[3]), annot)
        iflogger.info(mri_an_cmd)
        runCmd(mri_an_cmd, log)
        iflogger.info('-----------')
        iflogger.info(os.environ['SUBJECTS_DIR'])
        # extract cc and unknown to add to tractography mask, we do not want this as a region of interest
        # in FS 5.0, unknown and corpuscallosum are not available for the 35 scale (why?),
        # but for the other scales only, take the ones from _60
        rhun = op.join(output_dir, 'rh.unknown.label')
        lhun = op.join(output_dir, 'lh.unknown.label')
        rhco = op.join(output_dir, 'rh.corpuscallosum.label')
        lhco = op.join(output_dir, 'lh.corpuscallosum.label')
    shutil.copy(op.join(output_dir, 'regenerated_rh_60', 'rh.unknown.label'),
                rhun)
    shutil.copy(op.join(output_dir, 'regenerated_lh_60', 'lh.unknown.label'),
                lhun)
    shutil.copy(
        op.join(output_dir, 'regenerated_rh_60', 'rh.corpuscallosum.label'),
        rhco)
    shutil.copy(
        op.join(output_dir, 'regenerated_lh_60', 'lh.corpuscallosum.label'),
        lhco)

    mri_cmd = """mri_label2vol --label "%s" --label "%s" --label "%s" --label "%s" --temp "%s" --o  "%s" --identity """ % (
        rhun, lhun, rhco, lhco,
        op.join(op.join(subjects_dir, subject_id), 'mri',
                'orig.mgz'), op.join(fs_label_dir, 'cc_unknown.nii.gz'))
    runCmd(mri_cmd, log)
    runCmd('mris_volmask %s' % subject_id, log)
    mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % (
        op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id))
    runCmd(mri_cmd, log)
    mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % (
        op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id))
    runCmd(mri_cmd, log)

    iflogger.info("[ DONE ]")
Example no. 12
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume. """
    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation(
        'Lausanne2008')[parcellation_name]
    pgpath = parval['node_information_graphml']
    aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
    asegd = aseg.get_data()

    iflogger.info("Working on parcellation: ")
    iflogger.info(
        cmp_config._get_lausanne_parcellation('Lausanne2008')
        [parcellation_name])
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes_iter(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        if brv['dn_hemisphere'] == 'left':
            hemi = 'lh'
        elif brv['dn_hemisphere'] == 'right':
            hemi = 'rh'
        if brv['dn_region'] == 'subcortical':
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv['dn_region']))
            iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv['dn_fs_aseg_val']))
            rois[idx] = int(brv['dn_correspondence_id'])

        elif brv['dn_region'] == 'cortical':
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv['dn_region']))
            iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")

            labelpath = op.join(output_dir,
                                parval['fs_label_subdir_name'] % hemi)
            # construct .label file name

            fname = '%s.%s.label' % (hemi, brv['dn_fsname'])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region

            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (
                op.join(labelpath, fname), op.join(fs_dir, 'mri', 'orig.mgz'),
                op.join(output_dir, 'tmp.nii.gz'))
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, 'tmp.nii.gz'))
            tmpd = tmp.get_data()

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv['dn_correspondence_id'])

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.join(output_dir, 'ROI_%s.nii.gz' % parcellation_name)

        # update the header
        hdr = aseg.get_header()
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)

        log.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.get_affine(), hdr2)
        nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
Example no. 13
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name):
    """ Creates the ROI_%s.nii.gz files using the given parcellation information
    from networks. Iteratively create volume. """
    iflogger.info("Create the ROIs:")
    output_dir = op.abspath(op.curdir)
    fs_dir = op.join(subjects_dir, subject_id)
    cmp_config = cmp.configuration.PipelineConfiguration()
    cmp_config.parcellation_scheme = "Lausanne2008"
    log = cmp_config.get_logger()
    parval = cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name]
    pgpath = parval["node_information_graphml"]
    aseg = nb.load(op.join(fs_dir, "mri", "aseg.nii.gz"))
    asegd = aseg.get_data()

    iflogger.info("Working on parcellation: ")
    iflogger.info(cmp_config._get_lausanne_parcellation("Lausanne2008")[parcellation_name])
    iflogger.info("========================")
    pg = nx.read_graphml(pgpath)
    # each node represents a brain region
    # create a big 256^3 volume for storage of all ROIs
    rois = np.zeros((256, 256, 256), dtype=np.int16)

    count = 0
    for brk, brv in pg.nodes_iter(data=True):
        count = count + 1
        iflogger.info(brv)
        iflogger.info(brk)
        if brv["dn_hemisphere"] == "left":
            hemi = "lh"
        elif brv["dn_hemisphere"] == "right":
            hemi = "rh"
        if brv["dn_region"] == "subcortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv["dn_region"]))
            iflogger.info("Freesurfer Name: %s" % brv["dn_fsname"])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")
            # if it is subcortical, retrieve roi from aseg
            idx = np.where(asegd == int(brv["dn_fs_aseg_val"]))
            rois[idx] = int(brv["dn_correspondence_id"])

        elif brv["dn_region"] == "cortical":
            iflogger.info(brv)
            iflogger.info("---------------------")
            iflogger.info("Work on brain region: %s" % (brv["dn_region"]))
            iflogger.info("Freesurfer Name: %s" % brv["dn_fsname"])
            iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
            iflogger.info("---------------------")

            labelpath = op.join(output_dir, parval["fs_label_subdir_name"] % hemi)
            # construct .label file name

            fname = "%s.%s.label" % (hemi, brv["dn_fsname"])

            # execute fs mri_label2vol to generate volume roi from the label file
            # store it in temporary file to be overwritten for each region

            mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (
                op.join(labelpath, fname),
                op.join(fs_dir, "mri", "orig.mgz"),
                op.join(output_dir, "tmp.nii.gz"),
            )
            runCmd(mri_cmd, log)

            tmp = nb.load(op.join(output_dir, "tmp.nii.gz"))
            tmpd = tmp.get_data()

            # find voxel and set them to intensityvalue in rois
            idx = np.where(tmpd == 1)
            rois[idx] = int(brv["dn_correspondence_id"])

        # store volume eg in ROI_scale33.nii.gz
        out_roi = op.join(output_dir, "ROI_%s.nii.gz" % parcellation_name)

        # update the header
        hdr = aseg.get_header()
        hdr2 = hdr.copy()
        hdr2.set_data_dtype(np.uint16)

        log.info("Save output image to %s" % out_roi)
        img = nb.Nifti1Image(rois, aseg.get_affine(), hdr2)
        nb.save(img, out_roi)

    iflogger.info("[ DONE ]")
Example no. 14
def generate_WM_and_GM_mask(subjects_dir, subject_id, wm_mask_filename, roi_filename):
    """Taken from cmtk and adapted for running as isolated function in
    this interface.

    http://github.com/tnez/cmp/blob/master/cmp/stages/parcellation/maskcreation.py

    """
    import os.path as op
    import nibabel as ni
    import numpy as np
    
    fs_dir = op.join(subjects_dir,subject_id)
    output_dir = op.abspath(op.curdir)
    
    # need to convert
    mri_cmd = 'mri_convert -i "%s/mri/aparc+aseg.mgz" -o "%s/mri/aparc+aseg.nii.gz"' % (fs_dir, fs_dir)
    runCmd( mri_cmd, None )

    fout = op.join(fs_dir, 'mri', 'aparc+aseg.nii.gz') ##OUTPUT
    niiAPARCimg = ni.load(fout)
    niiAPARCdata = niiAPARCimg.get_data()

    # mri_convert aparc+aseg.mgz aparc+aseg.nii.gz
    WMout = op.join(output_dir, wm_mask_filename)

    #%% label mapping
    # Using FreesurferColorLUT.txt
    # mappings are stored in mappings.ods

#    CORTICAL = {1 : [ 1, 2, 3, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34],
#                2 : [31,13, 9,21,27,25,19,29,15,23, 1,24, 4,30,26,11, 6, 2, 5,22,16,14,10,20,12, 7, 8,18,30,17, 3,28,33]}
#
#
#    SUBCORTICAL = {1:[48,49,50,51,52,53,54,58,59,60, 9,10,11,12,13,17,18,26,27,28],
#                   2:[34,34,35,36,37,40,41,38,39,39,75,75,76,77,78,81,82,79,80,80]}
#
#    OTHER = {1:[16],
#             2:[83]}

    MAPPING = [[1,2012],[2,2019],[3,2032],[4,2014],[5,2020],[6,2018],[7,2027],[8,2028],[9,2003],[10,2024],
               [11,2017],[12,2026],[13,2002],[14,2023],[15,2010],[16,2022],[17,2031],[18,2029],[19,2008],
               [20,2025],[21,2005],[22,2021],[23,2011],[24,2013],[25,2007],[26,2016],[27,2006],[28,2033],
               [29,2009],[30,2015],[31,2001],[32,2030],[33,2034],[34,2035],[35,49],[36,50],[37,51],[38,52],
               [39,58],[40,53],[41,54],[42,1012],[43,1019],[44,1032],[45,1014],[46,1020],[47,1018],[48,1027],
               [49,1028],[50,1003],[51,1024],[52,1017],[53,1026],[54,1002],[55,1023],[56,1010],[57,1022],
               [58,1031],[59,1029],[60,1008],[61,1025],[62,1005],[63,1021],[64,1011],[65,1013],[66,1007],
               [67,1016],[68,1006],[69,1033],[70,1009],[71,1015],[72,1001],[73,1030],[74,1034],[75,1035],
               [76,10],[77,11],[78,12],[79,13],[80,26],[81,17],[82,18],[83,16]]

    WM = [2, 29, 32, 41, 61, 64, 59, 60, 27, 28] + range(77,86+1) + range(100, 117+1) + \
         range(155,158+1) + range(195,196+1) + range(199,200+1) + range(203,204+1) + \
         [212, 219, 223] + range(250,255+1)
    # add
    # 59  Right-Substancia-Nigra
    # 60  Right-VentralDC
    # 27  Left-Substancia-Nigra
    # 28  Left-VentralDC

    #%% create WM mask    
    niiWM = np.zeros( niiAPARCdata.shape, dtype = np.uint8 )

    for i in WM:
         niiWM[niiAPARCdata == i] = 1

    # we do not add subcortical regions
#    for i in SUBCORTICAL[1]:
#         niiWM[niiAPARCdata == i] = 1

    img = ni.Nifti1Image(niiWM, niiAPARCimg.get_affine(), niiAPARCimg.get_header())
    ni.save(img, WMout)

    #%% create GM mask (CORTICAL+SUBCORTICAL)
    #%  -------------------------------------
    GMout = op.join(output_dir, roi_filename)
    niiGM = np.zeros( niiAPARCdata.shape, dtype = np.uint8 )
    for ma in MAPPING:
        niiGM[ niiAPARCdata == ma[1]] = ma[0]
#        # % 33 cortical regions (stored in the order of "parcel33")
#        for idx,i in enumerate(CORTICAL[1]):
#            niiGM[ niiAPARCdata == (2000+i)] = CORTICAL[2][idx] # RIGHT
#            niiGM[ niiAPARCdata == (1000+i)] = CORTICAL[2][idx] + 41 # LEFT
#        #% subcortical nuclei
#        for idx,i in enumerate(SUBCORTICAL[1]):
#            niiGM[ niiAPARCdata == i ] = SUBCORTICAL[2][idx]
#        # % other region to account for in the GM
#        for idx, i in enumerate(OTHER[1]):
#            niiGM[ niiAPARCdata == i ] = OTHER[2][idx]
        img = ni.Nifti1Image(niiGM, niiAPARCimg.get_affine(), niiAPARCimg.get_header())
        ni.save(img, GMout)
Example no. 15
def generate_WM_and_GM_mask(subjects_dir, subject_id, wm_mask_filename, roi_filename):
    """Taken from cmtk and adapted for running as isolated function in
    this interface.

    http://github.com/tnez/cmp/blob/master/cmp/stages/parcellation/maskcreation.py

    """
    import os.path as op
    import nibabel as ni
    import numpy as np

    fs_dir = op.join(subjects_dir, subject_id)
    output_dir = op.abspath(op.curdir)

    # need to convert
    mri_cmd = 'mri_convert -i "%s/mri/aparc+aseg.mgz" -o "%s/mri/aparc+aseg.nii.gz"' % (fs_dir, fs_dir)
    runCmd(mri_cmd, None)

    fout = op.join(fs_dir, "mri", "aparc+aseg.nii.gz")  ##OUTPUT
    niiAPARCimg = ni.load(fout)
    niiAPARCdata = niiAPARCimg.get_data()

    # mri_convert aparc+aseg.mgz aparc+aseg.nii.gz
    WMout = op.join(output_dir, wm_mask_filename)

    #%% label mapping
    # Using FreesurferColorLUT.txt
    # mappings are stored in mappings.ods

    #    CORTICAL = {1 : [ 1, 2, 3, 5, 6, 7, 8, 9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34],
    #                2 : [31,13, 9,21,27,25,19,29,15,23, 1,24, 4,30,26,11, 6, 2, 5,22,16,14,10,20,12, 7, 8,18,30,17, 3,28,33]}
    #
    #
    #    SUBCORTICAL = {1:[48,49,50,51,52,53,54,58,59,60, 9,10,11,12,13,17,18,26,27,28],
    #                   2:[34,34,35,36,37,40,41,38,39,39,75,75,76,77,78,81,82,79,80,80]}
    #
    #    OTHER = {1:[16],
    #             2:[83]}

    MAPPING = [
        [1, 2012],
        [2, 2019],
        [3, 2032],
        [4, 2014],
        [5, 2020],
        [6, 2018],
        [7, 2027],
        [8, 2028],
        [9, 2003],
        [10, 2024],
        [11, 2017],
        [12, 2026],
        [13, 2002],
        [14, 2023],
        [15, 2010],
        [16, 2022],
        [17, 2031],
        [18, 2029],
        [19, 2008],
        [20, 2025],
        [21, 2005],
        [22, 2021],
        [23, 2011],
        [24, 2013],
        [25, 2007],
        [26, 2016],
        [27, 2006],
        [28, 2033],
        [29, 2009],
        [30, 2015],
        [31, 2001],
        [32, 2030],
        [33, 2034],
        [34, 2035],
        [35, 49],
        [36, 50],
        [37, 51],
        [38, 52],
        [39, 58],
        [40, 53],
        [41, 54],
        [42, 1012],
        [43, 1019],
        [44, 1032],
        [45, 1014],
        [46, 1020],
        [47, 1018],
        [48, 1027],
        [49, 1028],
        [50, 1003],
        [51, 1024],
        [52, 1017],
        [53, 1026],
        [54, 1002],
        [55, 1023],
        [56, 1010],
        [57, 1022],
        [58, 1031],
        [59, 1029],
        [60, 1008],
        [61, 1025],
        [62, 1005],
        [63, 1021],
        [64, 1011],
        [65, 1013],
        [66, 1007],
        [67, 1016],
        [68, 1006],
        [69, 1033],
        [70, 1009],
        [71, 1015],
        [72, 1001],
        [73, 1030],
        [74, 1034],
        [75, 1035],
        [76, 10],
        [77, 11],
        [78, 12],
        [79, 13],
        [80, 26],
        [81, 17],
        [82, 18],
        [83, 16],
    ]

    WM = (
        [2, 29, 32, 41, 61, 64, 59, 60, 27, 28]
        + range(77, 86 + 1)
        + range(100, 117 + 1)
        + range(155, 158 + 1)
        + range(195, 196 + 1)
        + range(199, 200 + 1)
        + range(203, 204 + 1)
        + [212, 219, 223]
        + range(250, 255 + 1)
    )
    # add
    # 59  Right-Substancia-Nigra
    # 60  Right-VentralDC
    # 27  Left-Substancia-Nigra
    # 28  Left-VentralDC

    #%% create WM mask
    niiWM = np.zeros(niiAPARCdata.shape, dtype=np.uint8)

    for i in WM:
        niiWM[niiAPARCdata == i] = 1

    # we do not add subcortical regions
    #    for i in SUBCORTICAL[1]:
    #         niiWM[niiAPARCdata == i] = 1

    img = ni.Nifti1Image(niiWM, niiAPARCimg.get_affine(), niiAPARCimg.get_header())
    ni.save(img, WMout)

    #%% create GM mask (CORTICAL+SUBCORTICAL)
    #%  -------------------------------------
    GMout = op.join(output_dir, roi_filename)
    niiGM = np.zeros(niiAPARCdata.shape, dtype=np.uint8)
    for ma in MAPPING:
        niiGM[niiAPARCdata == ma[1]] = ma[0]
        #        # % 33 cortical regions (stored in the order of "parcel33")
        #        for idx,i in enumerate(CORTICAL[1]):
        #            niiGM[ niiAPARCdata == (2000+i)] = CORTICAL[2][idx] # RIGHT
        #            niiGM[ niiAPARCdata == (1000+i)] = CORTICAL[2][idx] + 41 # LEFT
        #        #% subcortical nuclei
        #        for idx,i in enumerate(SUBCORTICAL[1]):
        #            niiGM[ niiAPARCdata == i ] = SUBCORTICAL[2][idx]
        #        # % other region to account for in the GM
        #        for idx, i in enumerate(OTHER[1]):
        #            niiGM[ niiAPARCdata == i ] = OTHER[2][idx]
        img = ni.Nifti1Image(niiGM, niiAPARCimg.get_affine(), niiAPARCimg.get_header())
        ni.save(img, GMout)
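Note that the `WM` label list in the last two examples adds `range(...)` results to plain lists, which works only on Python 2, where `range` returns a list; under Python 3 the equivalent list can be built by wrapping each call in `list()`:

# Python 3 equivalent of the WM label list built above.
WM = (
    [2, 29, 32, 41, 61, 64, 59, 60, 27, 28]
    + list(range(77, 86 + 1))
    + list(range(100, 117 + 1))
    + list(range(155, 158 + 1))
    + list(range(195, 196 + 1))
    + list(range(199, 200 + 1))
    + list(range(203, 204 + 1))
    + [212, 219, 223]
    + list(range(250, 255 + 1))
)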