Example #1
    def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
        """
        Parse GIFTI file(s) and return a Surface or a TimeSeries.
        :param surface_type: one of "Cortex", "Head", "ReadFromMetaData"
        :param data_file_part2: a file containing the second part of the surface
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exists" % data_file)
        if data_file_part2 is not None and not os.path.exists(data_file_part2):
            raise ParseException("Provided file part %s does not exists" % data_file_part2)

        try:
            gifti_image = giftiio.read(data_file)
            data_arrays = gifti_image.darrays

            self.logger.debug("File parsed successfully")
            if data_file_part2 is not None:
                data_arrays_part2 = giftiio.read(data_file_part2).darrays
            else:
                data_arrays_part2 = None
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)
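
Note: nibabel deprecated giftiio.read (the test_read_deprecated examples below assert exactly that warning). A minimal sketch of the modern equivalent, assuming a hypothetical local file surface.gii:

import nibabel as nib

# nibabel.load dispatches on the .gii extension and returns a GiftiImage,
# so the darrays attribute can be used exactly as with giftiio.read
gifti_image = nib.load("surface.gii")  # hypothetical path
data_arrays = gifti_image.darrays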
Example #2
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read

        read(DATA_FILE1)
        assert_equal(len(w), 1)
Example #3
def mean_value_phenotyping(pair):
    i, task = pair
    for side in sides:
        file_parcels = os.path.join(group_path, side+'.fsaverage41k.label.gii')
        array_parcels = gio.read(file_parcels).darrays[0].data
        for k, parcel in enumerate(parcels_org):
            parameters.append([k, parcel])
            working_dir = 'functional_analysis/solar_analysis'
            output_dir = os.path.join('/neurospin/brainomics/2016_HCP/functional_analysis/pheno_mean_value', task+'_'+str(i), side)
            if not os.path.isdir(output_dir):
                os.makedirs(output_dir)
            index = np.where(array_parcels == parcel)[0]
            array_s_ids = []
            array_mu = []
            for s_id in s_ids:
                file_path = path+s_id+'/MNINonLinear/Results/tfMRI_'+task+'/tfMRI_'+task+'_hp200_s2_level2'+reg+'.feat/GrayordinatesStats/cope'+str(i)+'.feat/'
                metric_out = os.path.join(file_path, side+"_"+fil+".41k_fsavg_"+side+".func.gii")
                if os.path.isfile(metric_out):
                    array_s_ids.append(s_id)
                    array_fil = gio.read(metric_out).darrays[0].data
                    array_mu.append(np.mean(array_fil[index]))
            df = pd.DataFrame()
            df['IID'] = array_s_ids
            df[fil] = array_mu
            output = os.path.join(output_dir, parcels_name[k]+'_'+fil)
            print "saving "+output
            df.to_csv(output+'.csv',  header=True, index=False)
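
The core pattern in these phenotyping snippets is masking a per-vertex texture by a parcel label and averaging inside the mask. A minimal self-contained sketch with synthetic data:

import numpy as np

array_parcels = np.array([0, 1, 1, 2, 1])      # per-vertex parcel labels (synthetic)
texture = np.array([0.1, 0.4, 0.6, 0.9, 0.5])  # per-vertex metric values (synthetic)
index = np.where(array_parcels == 1)[0]        # vertices belonging to parcel 1
print(np.mean(texture[index]))                 # mean inside the parcel -> 0.5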
Example #4
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read

        read(DATA_FILE1)
        assert_equal(len(w), 1)
Example #5
def main(args):
    ''' Reads both meshes, finds associations between nodes from both meshes
    and writes:
    - a distance map between each pair of associated nodes
    - a difference map between texture values for each pair of nodes
    If n is provided, stops the process after n random nodes'''
    import thickness as t
    import random
    m1 = t.Mesh(args.m1)
    m2 = t.Mesh(args.m2)
    t1 = gio.read(args.t1).darrays[0].data
    t2 = gio.read(args.t2).darrays[0].data

    diff = [-1] * len(m1.vertex)
    dist = [-1] * len(m1.vertex)

    if args.n:
        for i in range(int(args.n)):
            if i % 1000 == 0:
                print(i, '/', int(args.n))
            # randint is inclusive at both ends, so subtract 1 to stay in range
            r = random.randint(0, len(m1.vertex) - 1)
            diff[r], dist[r] = compare_node(r, m1, m2, t1, t2)
    else:
        for r in range(len(m1.vertex)):
            if r % 1000 == 0:
                print(r, '/', len(m1.vertex))
            diff[r], dist[r] = compare_node(r, m1, m2, t1, t2)

    gda = gifti.GiftiDataArray.from_array(np.array(diff), intent=1001)
    g = gifti.GiftiImage(darrays=[gda])
    gio.write(g, args.difffp)

    gda = gifti.GiftiDataArray.from_array(np.array(dist), intent=1001)
    g = gifti.GiftiImage(darrays=[gda])
    gio.write(g, args.distfp)
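
Newer nibabel versions deprecate GiftiDataArray.from_array and giftiio.write; a minimal sketch of the same write with the current API, assuming a hypothetical output path out.gii (intent 1001 is NIFTI_INTENT_SHAPE):

import numpy as np
import nibabel as nib
from nibabel import gifti

diff = np.zeros(10, dtype=np.float32)          # placeholder data
gda = gifti.GiftiDataArray(diff, intent=1001)  # construct the data array directly
img = gifti.GiftiImage(darrays=[gda])
nib.save(img, "out.gii")                       # nib.save replaces giftiio.write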
Example #6
    def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
        """
        Parse GIFTI file(s) and return a Surface or a TimeSeries.
        :param surface_type: one of "Cortex", "Head", "ReadFromMetaData"
        :param data_file_part2: a file containing the second part of the surface
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exists" % data_file)
        if data_file_part2 is not None and not os.path.exists(data_file_part2):
            raise ParseException("Provided file part %s does not exists" % data_file_part2)

        try:
            gifti_image = giftiio.read(data_file)
            data_arrays = gifti_image.darrays

            self.logger.debug("File parsed successfully")
            if data_file_part2 is not None:
                data_arrays_part2 = giftiio.read(data_file_part2).darrays
            else:
                data_arrays_part2 = None
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)
Example #7
def pits_DPF_distribution(parameters):
    """Compute the pits DPF distribution in one areal.

    Parameters
    ----------
    database: path of Morphologist database
    array_areals: array containing the areals information for each vertex
    s_ids: list of s_ids to consider
    areal: considered areal number in the areal file
    side: either R or L
    sd_template: side of the template
    outdir: output directory
    """
    database, array_areals, s_ids, areal, side, sd_template, outdir = parameters
    if not (side == 'R' or side == 'L'):
        raise ValueError("argument side must be either 'R' or 'L'")

    OUTPUT = os.path.join(outdir, side)
    if not os.path.isdir(OUTPUT):
        os.makedirs(OUTPUT)

    # Array containing the pits DPF values in the areal for all subjects
    X = np.array([])
    for s_id in s_ids:
        file_pits = os.path.join(database, s_id, "t1mri", "BL",
                                 "default_analysis", "segmentation", "mesh",
                                 "surface_analysis_" + sd_template,
                                 s_id + "_" + side + "white_pits_on_atlas.gii")
        file_DPF = os.path.join(database, s_id, "t1mri", "BL",
                                "default_analysis", "segmentation", "mesh",
                                "surface_analysis_" + sd_template,
                                s_id + "_" + side + "white_DPF_on_atlas.gii")
        #print file_DPF
        if os.path.isfile(file_pits) and os.path.isfile(file_DPF):
            array_pits = gio.read(file_pits).darrays[0].data
            array_DPF = gio.read(file_DPF).darrays[0].data
            # Find the index of pits
            index_pits = np.nonzero(array_pits)[0]
            # Identify their corresponding areals
            areals = array_areals[index_pits]
            # Find index of pits in the considered areal
            ind = np.where(areals == areal)[0]
            # test if the subject has pit in this parcel
            if ind.size:
                X = np.concatenate((X, array_DPF[index_pits[ind]]))
        else:
            print(str(s_id) + " is missing either pits or DPF files")

    # Save the array X to output
    output = os.path.join(OUTPUT, 'Areal_' + str(areal) + '.txt')
    print "saving " + output
    np.savetxt(output, X)
Example #8
def median_value_phenotyping(parameters):
    """
    Parameters
    task: HCP task name
    reg: type of registration either 'MSMAll' or '' for MSMSulc
    COPE: COPE number cf https://wiki.humanconnectome.org/display/..
                                ..PublicData/Task+fMRI+Contrasts
    s_ids: list of subject ids
    sd: hemisphere side
    areal: areal number on the texture of the HCP multimodal parcellation
    k: index of the areal in the areals_name table
    outdir: output directory
    """
    task, reg, COPE, s_ids, file_areals, sd, areal, k, outdir = parameters
    array_areals = gio.read(file_areals).darrays[0].data
    index = np.where(array_areals == areal)[0]
    array_s_ids = []
    array_mu = []
    for s_id in s_ids:
        f_path = os.path.join(
            path_org, '3T_tfMRI_' + task, s_id, 'MNINonLinear/Results',
            'tfMRI_' + task,
            'tfMRI_' + task + '_hp200_s2_level2' + reg + '.feat',
            'GrayordinatesStats/cope' + str(COPE) + '.feat')
        # For some subjects the MSMAll reg is not available in HCP database
        # this does not apply to the LANGUAGE tasks
        if not os.path.isdir(f_path):
            reg_bis = ''
            f_path = os.path.join(
                path_org, '3T_tfMRI_' + task, s_id, 'MNINonLinear/Results',
                'tfMRI_' + task,
                'tfMRI_' + task + '_hp200_s2_level2' + reg_bis + '.feat',
                'GrayordinatesStats/cope' + str(COPE) + '.feat')

        metric_out = os.path.join(
            f_path, sd + "_" + fil + ".41k_fsavg_" + sd + ".func.gii")
        if os.path.isfile(metric_out):
            array_s_ids.append(s_id)
            array_fil = gio.read(metric_out).darrays[0].data
            if median:
                #array_mu.append(np.amax(array_fil[index]))
                array_mu.append(np.median(array_fil[index]))
            else:
                array_mu.append(np.mean(array_fil[index]))
        else:
            pass
            #print metric_out+" doesn't exist for "+s_id
    df = pd.DataFrame()
    df['IID'] = array_s_ids
    df[fil] = array_mu
    output = os.path.join(outdir, areals_name[k] + '_' + fil)
    print "saving " + output
    df.to_csv(output + '.csv', header=True, index=False)
Example #9
    def read(self, data_file, use_center_surface):
        gifti_image = giftiio.read(data_file)
        image_metadata = gifti_image.meta.metadata
        self.logger.info(
            "From the file %s the extracted metadata is %s", data_file, image_metadata)

        data_arrays = gifti_image.darrays
        vertices = data_arrays[0].data
        triangles = data_arrays[1].data

        vol_geom_center_ras = [0, 0, 0]
        vertices_metadata = data_arrays[0].metadata
        self.logger.info(
            "The metadata from vertices data array is %s", vertices_metadata)
        vertices_coord_system = data_arrays[0].coordsys
        self.logger.info(
            "The coordinate system transform matrix from vertices data array is %s", vertices_coord_system)
        triangles_metadata = data_arrays[1].metadata
        self.logger.info(
            "The metadata from triangles data array is %s", triangles_metadata)

        if use_center_surface:
            vol_geom_center_ras = [0, 0, 0]
        else:
            vol_geom_center_ras[0] = float(
                vertices_metadata[CENTER_RAS_GIFTI_SURF[0]])
            vol_geom_center_ras[1] = float(
                vertices_metadata[CENTER_RAS_GIFTI_SURF[1]])
            vol_geom_center_ras[2] = float(
                vertices_metadata[CENTER_RAS_GIFTI_SURF[2]])

        return Surface(vertices, triangles, area_mask=None,
                       center_ras=vol_geom_center_ras, vertices_coord_system=vertices_coord_system,
                       generic_metadata=image_metadata, vertices_metadata=vertices_metadata,
                       triangles_metadata=triangles_metadata)
Example #10
    def parse_gifti(self, data_file):
        gifti_image = giftiio.read(data_file)
        image_metadata = gifti_image.meta.metadata

        logger = get_logger(__name__)
        logger.info("From the file %s the extracted metadata is %s" % (data_file, image_metadata))

        data_arrays = gifti_image.darrays
        vertices = data_arrays[0].data
        triangles = data_arrays[1].data

        vol_geom_center_ras = [0, 0, 0]
        vertices_metadata = data_arrays[0].metadata
        logger.info("The metadata from vertices data array is %s" % vertices_metadata)
        vertices_coord_system = data_arrays[0].coordsys
        logger.info("The coordinate system transform matrix from vertices data array is %s" % vertices_coord_system)
        triangles_metadata = data_arrays[1].metadata
        logger.info("The metadata from triangles data array is %s" % triangles_metadata)

        # TODO: we could read these values directly from the metadata
        vol_geom_center_ras[0] = float(data_arrays[0].metadata['VolGeomC_R'])
        vol_geom_center_ras[1] = float(data_arrays[0].metadata['VolGeomC_A'])
        vol_geom_center_ras[2] = float(data_arrays[0].metadata['VolGeomC_S'])

        return Surface(vertices, triangles, vol_geom_center_ras, image_metadata, vertices_metadata, vertices_coord_system, triangles_metadata)
Example #11
def _get_gifti_image(samples):
    if isinstance(samples, basestring):
        samples = giftiio.read(samples)

    required_class = gifti.GiftiImage
    if not isinstance(samples, required_class):
        raise TypeError('Input of type %s must be a %s' %
                        (samples, required_class))

    return samples
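
basestring only exists in Python 2. A Python 3 compatible sketch of the same guard, reusing the giftiio and gifti imports assumed by the snippet (the name _get_gifti_image_py3 is hypothetical); it also reports the offending type rather than the object itself:

def _get_gifti_image_py3(samples):
    # accept either a filename or an already-loaded GiftiImage
    if isinstance(samples, str):
        samples = giftiio.read(samples)
    if not isinstance(samples, gifti.GiftiImage):
        raise TypeError('Input of type %s must be a %s' %
                        (type(samples), gifti.GiftiImage))
    return samples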
Example #12
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read, write

        img = read(DATA_FILE1)
        assert_equal(len(w), 1)
        with InTemporaryDirectory():
            write(img, 'test.gii')
        assert_equal(len(w), 2)
Example #13
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read, write

        img = read(DATA_FILE1)
        assert_equal(len(w), 1)
        with InTemporaryDirectory():
            write(img, 'test.gii')
        assert_equal(len(w), 2)
Example #14
def _get_gifti_image(samples):
    if isinstance(samples, basestring):
        samples = giftiio.read(samples)

    required_class = gifti.GiftiImage
    if not isinstance(samples, required_class):
        raise TypeError('Input of type %s must be a %s' %
                        (samples, required_class))

    return samples
Example #15
    def parse(self, data_file, data_file_part2=None, surface_type=OPTION_READ_METADATA, should_center=False):
        """
        Parse GIFTI file(s) and return a Surface or a TimeSeries.
        :param surface_type: one of "Cortex", "Head", "ReadFromMetaData"
        :param data_file_part2: a file containing the second part of the surface
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exists" % data_file)
        if data_file_part2 is not None and not os.path.exists(data_file_part2):
            raise ParseException("Provided file part %s does not exists" % data_file_part2)

        try:
            gifti_image = giftiio.read(data_file)
            data_arrays = gifti_image.darrays

            self.logger.debug("File parsed successfully")
            if data_file_part2 is not None:
                data_arrays_part2 = giftiio.read(data_file_part2).darrays
            else:
                data_arrays_part2 = None
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)

        self.logger.debug("Determine data type stored in GIFTI file")
        
        # First check if it's a surface
        if self._is_surface_gifti(data_arrays):
            # If a second part exists it must be of the same type
            if data_arrays_part2 is not None and not self._is_surface_gifti(data_arrays_part2):
                raise ParseException("Second file must be a surface too")
            return self._parse_surface(data_arrays, data_arrays_part2, surface_type, should_center)
        elif self._is_timeseries_gifti(data_arrays):
            return self._parse_timeseries(data_arrays)
        else:
            raise ParseException("Could not map data from GIFTI file to a TVB data type")
Example #16
def test_gifti_dataset(fn, format_, include_nodes):
    expected_ds = _get_test_dataset(include_nodes)

    expected_ds_sa = expected_ds.copy(deep=True)
    expected_ds_sa.sa['chunks'] = [4, 3, 2, 1, 3, 2]
    expected_ds_sa.sa['targets'] = ['t%d' % i for i in xrange(6)]


    # build GIFTI file from scratch
    gifti_string = _build_gifti_string(format_, include_nodes)
    with open(fn, 'w') as f:
        f.write(gifti_string)

    # reading GIFTI file
    ds = gifti_dataset(fn)
    assert_datasets_almost_equal(ds, expected_ds)

    # test GiftiImage input
    img = nb_giftiio.read(fn)
    ds2 = gifti_dataset(img)
    assert_datasets_almost_equal(ds2, expected_ds)

    # test using Nibabel's output from write
    nb_giftiio.write(img, fn)
    ds3 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds3, expected_ds)

    # test targets and chunks arguments
    ds3_sa = gifti_dataset(fn, targets=expected_ds_sa.targets,
                           chunks=expected_ds_sa.chunks)
    assert_datasets_almost_equal(ds3_sa, expected_ds_sa)

    # test map2gifti
    img2 = map2gifti(ds)
    ds4 = gifti_dataset(img2)
    assert_datasets_almost_equal(ds4, expected_ds)

    map2gifti(ds, fn, encoding=format_)
    ds5 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds5, expected_ds)

    # test map2gifti with array input; nodes are not stored
    map2gifti(ds.samples, fn)
    ds6 = gifti_dataset(fn)
    if include_nodes:
        assert_raises(AssertionError, assert_datasets_almost_equal,
                      ds6, expected_ds)
    else:
        assert_datasets_almost_equal(ds6, expected_ds)

    assert_raises(TypeError, gifti_dataset, ds3_sa)
    assert_raises(TypeError, map2gifti, img, fn)
Example #17
    def parse(self, data_file):
        """
        Parse GIFTI file and return a TimeSeries.
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select a GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exist" % data_file)
        try:
            gifti_image = giftiio.read(data_file)
            self.logger.debug("File parsed successfully")
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)
Example #18
    def parse(self, data_file):
        """
        Parse GIFTI file and return a TimeSeries.
        """
        self.logger.debug("Start to parse GIFTI file: %s" % data_file)
        if data_file is None:
            raise ParseException("Please select a GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException("Provided file %s does not exist" % data_file)
        try:
            gifti_image = giftiio.read(data_file)
            self.logger.debug("File parsed successfully")
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)
Example #19
def load_folds(folds_file, graph_file=None):
    """ Load morphologist folds and associated labels.

    Parameters
    ----------
    folds_file: str (mandatory)
        the folds '.gii' file.
    graph_file: str (optional, default None)
        the path to a morphologist '.arg' graph file.

    Returns
    -------
    folds: dict with TriSurface
        all the loaded folds. The fold names are stored in the metadata.
    """
    # Load the labels
    if graph_file is not None:
        labels = parse_graph(graph_file)
    else:
        labels = {}

    # Load folds
    image = gio.read(folds_file)
    nb_of_surfs = len(image.darrays)
    if nb_of_surfs % 2 != 0:
        raise ValueError("Need an even number of arrays (vertices/triangles pairs).")
    folds = {}
    for vertindex in range(0, nb_of_surfs, 2):
        vertices = image.darrays[vertindex].data
        triangles = image.darrays[vertindex + 1].data
        labelindex = image.darrays[vertindex].get_metadata()["Timestep"]
        if labelindex != image.darrays[vertindex +
                                       1].get_metadata()["Timestep"]:
            raise ValueError("Gifti arrays '{0}' and '{1}' do not share the "
                             "same label.".format(vertindex, vertindex + 1))
        labelindex = int(labelindex)
        if labelindex in labels:
            label = labels[labelindex]
        else:
            label = "NC"
        metadata = {"fold_name": label}
        surf = TriSurface(vertices, triangles, labels=None, metadata=metadata)
        folds[labelindex] = surf

    return folds
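
A minimal usage sketch, with hypothetical paths and assuming TriSurface keeps the metadata dict passed to it as an attribute:

folds = load_folds("subject_Lfolds.gii", graph_file="subject_L.arg")
for labelindex, surf in folds.items():
    # each fold surface carries its name in the metadata set above
    print(labelindex, surf.metadata["fold_name"])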
Example #20
def cortical_mesh_items(fname, xyz=[1, 0, 2]):
    glopt = {'shader': 'shaded',
             'glOptions': 'opaque',
             'smooth': True,
             'color': (0.7, 0.7, 0.7, 1.0)}
    parts = fname.split('_')
    bname = '_'.join(parts[:-1])
    gtype = parts[-1].split('.gii')[0][1:]
    items = []
    centers = []
    for hemi in 'LR':
        gii = giftiio.read('%s_%s%s.gii' % (bname, hemi, gtype))
        vert, face = [a.data for a in gii.darrays]
        print(vert.shape, face.shape)
        items.append(gl.GLMeshItem(vertexes=vert, faces=face[:, xyz], **glopt))
        centers.append(vert.mean(axis=0))
    center = (centers[0] + centers[1]) / 2.0
    return center, items
Example #21
def read(fn):
    '''Reads a GIFTI surface file

    Parameters
    ----------
    fn: str
        Filename

    Returns
    -------
    surf_: surf.Surface
        Surface

    Notes
    -----
    Any meta-information stored in the GIFTI file is not present in surf_.
    '''

    g = giftiio.read(fn)

    vertices = _get_single_array(g, 'NIFTI_INTENT_POINTSET').data
    faces = _get_single_array(g, 'NIFTI_INTENT_TRIANGLE').data

    return surf.Surface(vertices, faces)
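
_get_single_array is not shown on this page; a plausible sketch using nibabel's public intent lookup, assuming exactly one array per intent is expected:

def _get_single_array(g, intent):
    # GiftiImage.get_arrays_from_intent returns every darray matching the intent
    arrays = g.get_arrays_from_intent(intent)
    if len(arrays) != 1:
        raise ValueError('Expected exactly one array with intent %s, found %d'
                         % (intent, len(arrays)))
    return arrays[0]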
Example #22
def read(fn):
    '''Reads a GIFTI surface file

    Parameters
    ----------
    fn: str
        Filename

    Returns
    -------
    surf_: surf.Surface
        Surface

    Notes
    -----
    Any meta-information stored in the GIFTI file is not present in surf_.
    '''

    g = giftiio.read(fn)

    vertices = _get_single_array(g, 'NIFTI_INTENT_POINTSET').data
    faces = _get_single_array(g, 'NIFTI_INTENT_TRIANGLE').data

    return surf.Surface(vertices, faces)
Example #23
def test_gifti_dataset(fn, format_, include_nodes):
    expected_ds = _get_test_dataset(include_nodes)

    expected_ds_sa = expected_ds.copy(deep=True)
    expected_ds_sa.sa['chunks'] = [4, 3, 2, 1, 3, 2]
    expected_ds_sa.sa['targets'] = ['t%d' % i for i in xrange(6)]

    # build GIFTI file from scratch
    gifti_string = _build_gifti_string(format_, include_nodes)
    with open(fn, 'w') as f:
        f.write(gifti_string)

    # reading GIFTI file
    ds = gifti_dataset(fn)
    assert_datasets_almost_equal(ds, expected_ds)

    # test GiftiImage input
    img = nb_giftiio.read(fn)
    ds2 = gifti_dataset(img)
    assert_datasets_almost_equal(ds2, expected_ds)

    # test using Nibabel's output from write
    nb_giftiio.write(img, fn)
    ds3 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds3, expected_ds)

    # test targets and chunks arguments
    ds3_sa = gifti_dataset(fn, targets=expected_ds_sa.targets,
                           chunks=expected_ds_sa.chunks)
    assert_datasets_almost_equal(ds3_sa, expected_ds_sa)

    # test map2gifti
    img2 = map2gifti(ds)
    ds4 = gifti_dataset(img2)
    assert_datasets_almost_equal(ds4, expected_ds)

    # test float64 and int64, which must be converted to float32 and int32
    fa = dict()
    if include_nodes:
        fa['node_indices'] = ds.fa.node_indices.astype(np.int64)

    ds_float64 = Dataset(samples=ds.samples.astype(np.float64), fa=fa)
    ds_float64_again = gifti_dataset(map2gifti(ds_float64))
    assert_equal(ds_float64_again.samples.dtype, np.float32)
    if include_nodes:
        assert_equal(ds_float64_again.fa.node_indices.dtype, np.int32)


    # test contents of GIFTI image
    assert (isinstance(img2, nb_gifti.GiftiImage))
    nsamples = ds.samples.shape[0]
    if include_nodes:
        node_arr = img2.darrays[0]
        assert_equal(node_arr.intent,
                     intent_codes.code['NIFTI_INTENT_NODE_INDEX'])
        assert_equal(node_arr.coordsys, None)
        assert_equal(node_arr.data.dtype, np.int32)
        assert_equal(node_arr.datatype, data_type_codes['int32'])

        first_data_array_pos = 1
        narrays = nsamples + 1
    else:
        first_data_array_pos = 0
        narrays = nsamples

    assert_equal(len(img.darrays), narrays)
    for i in xrange(nsamples):
        arr = img2.darrays[i + first_data_array_pos]

        # check intent code
        illegal_intents = ['NIFTI_INTENT_NODE_INDEX',
                           'NIFTI_INTENT_GENMATRIX',
                           'NIFTI_INTENT_POINTSET',
                           'NIFTI_INTENT_TRIANGLE']
        assert (arr.intent not in [intent_codes.code[s]
                                   for s in illegal_intents])

        # although the GIFTI standard is not very clear about whether
        # arrays with other intent than NODE_INDEX can have a
        # GiftiCoordSystem, FreeSurfer's mris_convert
        # does not seem to like its presence. Thus we make sure that
        # it's not there.

        assert_equal(arr.coordsys, None)
        assert_equal(arr.data.dtype, np.float32)
        assert_equal(arr.datatype, data_type_codes['float32'])



    # another test for map2gifti, setting the encoding explicitly
    map2gifti(ds, fn, encoding=format_)
    ds5 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds5, expected_ds)

    # test map2gifti with array input; nodes are not stored
    map2gifti(ds.samples, fn)
    ds6 = gifti_dataset(fn)
    if include_nodes:
        assert_raises(AssertionError, assert_datasets_almost_equal,
                      ds6, expected_ds)
    else:
        assert_datasets_almost_equal(ds6, expected_ds)

    assert_raises(TypeError, gifti_dataset, ds3_sa)
    assert_raises(TypeError, map2gifti, img, fn)
Example #24
    # Save the array X to output
    output = os.path.join(OUTPUT, 'Areal_' + str(areal) + '.txt')
    print "saving " + output
    np.savetxt(output, X)


if __name__ == '__main__':

    ROOT_DIR = ""
    # In the following, lh refers to the symmetric template side
    file_areals = ('clusters_total_average_pits_smoothed0.7_60_sym_lh_dist15.0'
                   '_area100.0_ridge2.0.gii')
    areals = os.path.join(ROOT_DIR, '2016_HCP_pits_cleaned',
                          'pits_density_sym_lh', file_areals)

    array_areals = gio.read(areals).darrays[0].data
    # Obtain a list of areal numbers
    areals_list = np.unique(array_areals)
    # Exclude the first areal corresponding to corpus callosum and fornix
    areals_list = areals_list[1:]

    # Path to the Morphologist database
    database = ''
    s_ids = os.listdir(database)
    s_ids = sorted(s_ids)

    # output directory
    outdir = os.path.join(ROOT_DIR, '2016_HCP_pits_cleaned',
                          'pits_analysis_lh', 'pits_DPF_distribution')

    parameters = []
Example #25
INPUT = "/neurospin/brainomics/2016_sulcal_pits/extracting_pits_V1/"

## OUTPUT ##
OUTPUT = "/neurospin/brainomics/2016_sulcal_pits/pheno_pits/test0_V1/"

path = INPUT + sides[side] + '/kept/pits/'
s_ids = []
for filename in glob.glob(os.path.join(path, '*.txt')):
    m = re.search(path + side + '(.+?)_pits_kept.txt', filename)
    if m:
        label = m.group(1)
        if '000' in label:
            s_ids.append(label)

filename_average = '/neurospin/imagen/workspace/cati/BVdatabase/average_' + side + 'mesh_BL.gii'
mesh_average = gio.read(filename_average)
vertices_origin = mesh_average.darrays[0].data
triangles_origin = mesh_average.darrays[1].data
file_parcels_marsAtlas = '/neurospin/imagen/workspace/cati/hiphop138-Template/hiphop138_' + side + 'white_parcels_marsAtlas.gii'
parcels_data = gio.read(file_parcels_marsAtlas).darrays[0].data
"""cortex = np.nonzero(parcels_data)[0] # because from my understanding the medial wall (which is the non-cortex part) is the parcel of value 0
keep = np.zeros(triangles_origin.shape[0])
for i in [0, 1, 2]:
    keep += np.array([item in cortex for item in triangles_origin[:, i]])
ind = np.where(keep == 3)[0]
triangles = np.array(triangles_origin[ind], dtype=np.int32)
triangles_old = np.array(triangles_origin[ind], dtype=np.int32)
for c, i in enumerate(cortex):
    triangles[np.where(triangles_old == i)] = c
vertices = vertices_origin[cortex].astype(np.float64)
triangles = triangles.astype(np.int32)
Example #26
def get_parcel_info():
    # load parcellation
    lhfile=os.path.join(basedir,'all_selected_L_new_parcel_renumbered.func.gii')
    rhfile=os.path.join(basedir,'all_selected_R_new_parcel_renumbered.func.gii')
    
    lh=giftiio.read(lhfile)
    rh=giftiio.read(rhfile)
    
    lhparcvals=numpy.unique(lh.darrays[0].data)[1:]
    rhparcvals=numpy.unique(rh.darrays[0].data)[1:]
    
    lhinfomapfile=os.path.join(basedir,'parcel_L_consensus_new.func.gii')
    rhinfomapfile=os.path.join(basedir,'parcel_R_consensus_new.func.gii')
    
    lhinfomap=nibabel.gifti.giftiio.read(lhinfomapfile)
    rhinfomap=nibabel.gifti.giftiio.read(rhinfomapfile)
    
    # open community files
    communities_orig=numpy.loadtxt(os.path.join(basedir,'parcel_infomap_assigns_startpos50_neworder_communities.txt'))
    communities_orig_idx=numpy.loadtxt(os.path.join(basedir,'parcel_infomap_assigns_startpos50_neworder_consensus_ind.txt'))
    
    lhsurffile=os.path.join(fsdir,'sub013.L.midthickness.32k_fs_LR.surf.gii')
    lhsurf=giftiio.read(lhsurffile)
    rhsurffile=os.path.join(fsdir,'sub013.R.midthickness.32k_fs_LR.surf.gii')
    rhsurf=giftiio.read(rhsurffile)
    
    
    # get mean XYZ location and central vertex of each parcel
    lhcoords=numpy.zeros((len(lhparcvals),3))
    lhcentvertex=numpy.zeros(len(lhparcvals))
    
    for i in range(len(lhparcvals)):
        v=lhparcvals[i]
        vertices=numpy.where(lh.darrays[0].data==v)[0]
        lhcoords[i,:]=numpy.mean(lhsurf.darrays[0].data[vertices,:],0)
        vdist=numpy.zeros(len(vertices))
        for j in range(len(vertices)):
            vdist[j]= numpy.linalg.norm(lhsurf.darrays[0].data[j,:] - lhcoords[i,:])
        lhcentvertex[i]=vertices[vdist==numpy.min(vdist)]
        
    rhcoords=numpy.zeros((len(rhparcvals),3))
    rhcentvertex=numpy.zeros(len(rhparcvals))
    
    for i in range(len(rhparcvals)):
        v=rhparcvals[i]
        vertices=numpy.where(rh.darrays[0].data==v)[0]
        rhcoords[i,:]=numpy.mean(rhsurf.darrays[0].data[vertices,:],0)
        vdist=numpy.zeros(len(vertices))
        for j in range(len(vertices)):
            vdist[j]= numpy.linalg.norm(rhsurf.darrays[0].data[j,:] - rhcoords[i,:])
        rhcentvertex[i]=vertices[vdist==numpy.min(vdist)]
            
    # get labels for each parcel
    
    llabelfile=os.path.join(fsdir,'sub013.L.aparc.32k_fs_LR.label.gii')
    rlabelfile=os.path.join(fsdir,'sub013.R.aparc.32k_fs_LR.label.gii')
    
    lparc=giftiio.read(llabelfile)
    rparc=giftiio.read(rlabelfile)
    
    
    lhparclabels=[]
    rhparclabels=[]
    
    for i in range(len(lhcentvertex)):
        l=lparc.darrays[0].data[lhcentvertex[i]]
        lhparclabels.append(lparc.labeltable.labels[l].label)
    
    for i in range(len(rhcentvertex)):
        l=rparc.darrays[0].data[rhcentvertex[i]]
        rhparclabels.append(rparc.labeltable.labels[l].label)
    
    
    # get RSN labels from HCP tutorial data
    l_rsnlabelfile=os.path.join(tutorialdir,'RSN-networks.L.32k_fs_LR.label.gii')
    r_rsnlabelfile=os.path.join(tutorialdir,'RSN-networks.R.32k_fs_LR.label.gii')
    
    l_rsn=giftiio.read(l_rsnlabelfile)
    r_rsn=giftiio.read(r_rsnlabelfile)
    
    f=open(os.path.join(basedir,'module_names.txt'))
    power_network_names={}
    for l in f.readlines():
        l_s=l.strip().split('\t')
        power_network_names[float(l_s[0])]=l_s[1]
    f.close()
    
    l_rsnlabels=[]
    r_rsnlabels=[]
    l_rsnlabels_yeo7=[]
    r_rsnlabels_yeo7=[]
    l_rsnlabels_yeo17=[]
    r_rsnlabels_yeo17=[]
    
    for i in range(len(lhcentvertex)):
        l=lhinfomap.darrays[0].data[lhcentvertex[i]]
        l_rsnlabels.append(power_network_names[l])
        l=l_rsn.darrays[0].data[lhcentvertex[i]]
        l_rsnlabels_yeo7.append(l_rsn.labeltable.labels[l].label)
        l=l_rsn.darrays[1].data[lhcentvertex[i]]
        l_rsnlabels_yeo17.append(l_rsn.labeltable.labels[l].label)
    
    for i in range(len(rhcentvertex)):
        l=rhinfomap.darrays[0].data[rhcentvertex[i]]
        r_rsnlabels.append(power_network_names[l])
        l=r_rsn.darrays[0].data[rhcentvertex[i]]
        r_rsnlabels_yeo7.append(r_rsn.labeltable.labels[l].label)
        l=r_rsn.darrays[1].data[rhcentvertex[i]]
        r_rsnlabels_yeo17.append(r_rsn.labeltable.labels[l].label)
    
    # get lobe
    
    atlasfile=os.path.join(fsldir,'data/atlases/MNI/MNI-maxprob-thr0-2mm.nii.gz')
    
    atlas=nibabel.load(atlasfile)
    atlasdata=atlas.get_data()
    lobenames=['None','Caudate','Cerebellum','Frontal','Insula','Occipital','Parietal','Putamen','Temporal','Thalamus']
    
    def mni2vox(mni,qform):
        tmp=numpy.linalg.inv(qform)*numpy.matrix([mni[0],mni[1],mni[2],1]).T
        return numpy.array(numpy.round(tmp.T[0,0:3]))[0]
    
    l_lobe=[]
    r_lobe=[]
    
    for i in range(len(lhcentvertex)):
        vox=mni2vox(lhcoords[i,:],atlas.get_qform())
        lobe=atlasdata[vox[0],vox[1],vox[2]]
        l_lobe.append(lobenames[lobe])
    for i in range(len(rhcentvertex)):
        vox=mni2vox(rhcoords[i,:],atlas.get_qform())
        lobe=atlasdata[vox[0],vox[1],vox[2]]
        r_lobe.append(lobenames[lobe])
    
    use_aseg=True
    
    def vox2mni(vox,qform):
        vox=numpy.array([vox[0],vox[1],vox[2],1])
        return qform.dot(vox)[:3]
    
    if use_aseg:
    # now get data from APARC
        f=open(os.path.join(stdir,'aseg/aseg_fields.txt'))
        asegnames={}
        for l in f.readlines():
            l_s=l.strip().split()
            asegnames[int(l_s[0])]=l_s[1]
        f.close()
        asegkeys=asegnames.keys()
        asegkeys.sort()
        asegimg=nibabel.load(os.path.join(stdir,'aseg/aparc+aseg_reg2mni.nii.gz'))
        asegdata=asegimg.get_data()
    
        meancoords={}
        for k in asegkeys:
            mni=[]
            v=numpy.where(asegdata==k)
            nvox=len(v[0])
            for i in range(nvox):
                mni.append(vox2mni([v[0][i],v[1][i],v[2][i]],asegimg.get_qform()))
            mni=numpy.matrix(mni)
            meancoords[k]=numpy.mean(mni,0)
    
    
    f=open(os.path.join(basedir,'parcel_data.txt'),'w')
    ctr=1
    for i in range(len(lhcentvertex)):
        f.write('%d\tL\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n'%(ctr,lhcoords[i,0],
                    lhcoords[i,1],lhcoords[i,2],l_lobe[i],lhparclabels[i],
                    l_rsnlabels[i],l_rsnlabels_yeo7[i],l_rsnlabels_yeo17[i]))
        ctr+=1
    
    for i in range(len(rhcentvertex)):
        f.write('%d\tR\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n'%(ctr,rhcoords[i,0],
                    rhcoords[i,1],rhcoords[i,2],r_lobe[i],rhparclabels[i],
                    r_rsnlabels[i],r_rsnlabels_yeo7[i],r_rsnlabels_yeo17[i]))
        ctr+=1
    
    
    if use_aseg:
      for k in asegkeys:
        if meancoords[k][0,0]<0:
            hemis='L'
        else:
            hemis='R'
        f.write('%d\t%s\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n'%(ctr,hemis,meancoords[k][0,0],meancoords[k][0,1],meancoords[k][0,2],'subcortical',asegnames[k],'na','na','na'))
        ctr+=1
        
    f.close()
Example #27
def map_pits_heritability(template, side, areals, indir, cb, interval_cb,
                          dict_freq, pval_thresh, outdir):
    """
    Parameters
    template: path to template mesh file
    areals: path to gii file containing the texture of areals
    side: hemisphere side from which SOLAR estimates are displayed
    indir: directory containing h2 and pval dictionaries
    cb: colorbar names must be an array with [cb_h2, cb_pval]
    interval_cb: numeric boundaries for the colorbar display
                 must be a double array [inter_h2[0,1], inter_pval[0,1]]
    dict_freq: dictionary containing pits frequency per areal
    pval_thresh: p-value threshold
    outdir: output directory for the snapshots
    """
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    # Define the quaternions that will be used for the snapshots
    if (side == "L"):
        view_quaternions = {
            'intern': [0.5, 0.5, 0.5, 0.5],
            'extern': [0.5, -0.5, -0.5, 0.5],
            'bottom': [1, 0, 0, 0],
            'top': [0, 0, 1, 0]
        }
    else:
        view_quaternions = {
            'intern': [0.5, 0.5, 0.5, 0.5],
            'extern': [0.5, -0.5, -0.5, 0.5],
            'bottom': [1, 0, 0, 0],
            'top': [0, 0, 1, 0]
        }

    # Load h2 and pval dictionaries
    path_h2 = os.path.join(indir, 'h2_dict.json')
    with open(path_h2, 'r') as f:
        data = json.load(f)
    dict_h2 = json.loads(data)
    path_pval = os.path.join(indir, 'pval_dict.json')
    with open(path_pval, 'r') as f:
        data = json.load(f)
    dict_pval = json.loads(data)

    # Areal value in each vertex
    array_areals = gio.read(areals).darrays[0].data
    # Create arrays that will contain h2 and pval estimates on each vertex
    array_h2 = np.zeros(len(array_areals))
    array_pval = np.zeros(len(array_areals))
    list_areals = np.unique(array_areals)

    # Load the mesh in anatomist
    mesh = ana.loadObject(template)

    for areal in list_areals:
        # make sure areal has the right format
        areal = str(int(areal))
        if (areal in dict_h2[side] and
                # Check if areal passed the threshold frequency in both hemispheres
                dict_freq['L']["Areal_" + areal] *
                dict_freq['R']["Areal_" + areal] > 0):
            # Finally, check if the p-value passes the threshold
            if dict_pval[side][areal] < pval_thresh:
                # Select the index of vertices belonging to the areal
                ind = np.where(array_areals == float(areal))[0]
                # Give them the h2 estimate of this areal
                array_h2[ind] = dict_h2[side][areal]
                # And the log10 pvalue
                array_pval[ind] = -np.log10(dict_pval[side][areal])

    ### Block performing the display of h2 estimates ###
    # Create anatomist window
    window = ana.createWindow('3D', geometry=[0, 0, 584, 584])
    tex = aims.TimeTexture(dtype='FLOAT')
    tex[0].assign(array_h2)
    pits_tex = ana.toAObject(tex)
    tex_mesh = ana.fusionObjects([mesh, pits_tex],
                                 method='FusionTexSurfMethod')
    tex_mesh.assignReferential(ref)
    tex_mesh.setMaterial(front_face='counterclockwise')
    pits_tex.setPalette(cb[0],
                        minVal=interval_cb[0][0],
                        maxVal=interval_cb[0][1],
                        absoluteMode=True)
    updateWindow(window, tex_mesh)

    # Loop through the quaternions and do the snapshot
    for sd in view_quaternions.keys():
        q = aims.Quaternion(view_quaternions[sd])
        window.camera(view_quaternion=view_quaternions[sd], zoom=0.65)
        # Snapshot file output
        output = os.path.join(outdir,
                              'snapshot_h2_' + side + '_' + sd + '.png')
        ana.execute('WindowConfig', windows=[window], snapshot=output)

    #### Block performing the display of pvalues estimates ###
    # Create anatomist window, second time (else zoom problem for snapshots)
    window = ana.createWindow('3D', geometry=[0, 0, 584, 584])
    tex = aims.TimeTexture(dtype='FLOAT')
    tex[0].assign(array_pval)
    pits_tex = ana.toAObject(tex)
    tex_mesh = ana.fusionObjects([mesh, pits_tex],
                                 method='FusionTexSurfMethod')
    tex_mesh.assignReferential(ref)
    tex_mesh.setMaterial(front_face='counterclockwise')
    pits_tex.setPalette(cb[1],
                        minVal=interval_cb[1][0],
                        maxVal=interval_cb[1][1],
                        absoluteMode=True)
    updateWindow(window, tex_mesh)

    # Loop through the quaternions and do the snapshot
    for sd in view_quaternions.keys():
        q = aims.Quaternion(view_quaternions[sd])
        window.camera(view_quaternion=view_quaternions[sd], zoom=0.65)
        # Snapshot file output
        output = os.path.join(outdir,
                              'snapshot_pval_' + side + '_' + sd + '.png')
        ana.execute('WindowConfig', windows=[window], snapshot=output)
Example #28
            "mesh", "surface_analysis",
            "" + s_id + "_" + side + "white_pits_on_atlas.gii")
        file_DPF_on_atlas = os.path.join(
            path, s_id, "t1mri", "BL", "default_analysis", "segmentation",
            "mesh", "surface_analysis",
            "" + s_id + "_" + side + "white_DPF_on_atlas.gii")
        file_pits = os.path.join(path, s_id, "t1mri", "BL", "default_analysis",
                                 "segmentation", "mesh", "surface_analysis",
                                 "" + s_id + "_" + side + "white_pits.gii")
        file_DPF = os.path.join(path, s_id, "t1mri", "BL", "default_analysis",
                                "segmentation", "mesh", "surface_analysis",
                                "" + s_id + "_" + side + "white_DPF.gii")
        if os.path.isfile(file_pits_on_atlas) and os.path.isfile(
                file_DPF_on_atlas):
            count += 1
            array_pits_on_atlas = gio.read(file_pits_on_atlas).darrays[0].data
            array_DPF_on_atlas = gio.read(file_DPF_on_atlas).darrays[0].data
            array_pits = gio.read(file_pits).darrays[0].data
            array_DPF = gio.read(file_DPF).darrays[0].data
            pits = np.nonzero(array_pits)
            print "NUMBER PITS: " + str(len(pits[0]))
            """print pits[0]
            print array_DPF[pits]
            print len(array_DPF[pits])"""

            pits_on_atlas = np.nonzero(array_pits_on_atlas)
            print "NUMBER PITS ON ATLAS: " + str(len(pits_on_atlas[0]))
        avg_sum[side] += float(len(pits_on_atlas[0])) / len(pits[0])
    avg_sum[side] = avg_sum[side] / count

print(avg_sum)
Example #29
def test_gifti_dataset(fn, format_, include_nodes):
    expected_ds = _get_test_dataset(include_nodes)

    expected_ds_sa = expected_ds.copy(deep=True)
    expected_ds_sa.sa['chunks'] = [4, 3, 2, 1, 3, 2]
    expected_ds_sa.sa['targets'] = ['t%d' % i for i in xrange(6)]

    # build GIFTI file from scratch
    gifti_string = _build_gifti_string(format_, include_nodes)
    with open(fn, 'w') as f:
        f.write(gifti_string)

    # reading GIFTI file
    ds = gifti_dataset(fn)
    assert_datasets_almost_equal(ds, expected_ds)

    # test GiftiImage input
    img = nb_giftiio.read(fn)
    ds2 = gifti_dataset(img)
    assert_datasets_almost_equal(ds2, expected_ds)

    # test using Nibabel's output from write
    nb_giftiio.write(img, fn)
    ds3 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds3, expected_ds)

    # test targets and chunks arguments
    ds3_sa = gifti_dataset(fn, targets=expected_ds_sa.targets,
                           chunks=expected_ds_sa.chunks)
    assert_datasets_almost_equal(ds3_sa, expected_ds_sa)

    # test map2gifti
    img2 = map2gifti(ds)
    ds4 = gifti_dataset(img2)
    assert_datasets_almost_equal(ds4, expected_ds)

    # test float64 and int64, which must be converted to float32 and int32
    fa = dict()
    if include_nodes:
        fa['node_indices'] = ds.fa.node_indices.astype(np.int64)

    ds_float64 = Dataset(samples=ds.samples.astype(np.float64), fa=fa)
    ds_float64_again = gifti_dataset(map2gifti(ds_float64))
    assert_equal(ds_float64_again.samples.dtype, np.float32)
    if include_nodes:
        assert_equal(ds_float64_again.fa.node_indices.dtype, np.int32)


    # test contents of GIFTI image
    assert (isinstance(img2, nb_gifti.GiftiImage))
    nsamples = ds.samples.shape[0]
    if include_nodes:
        node_arr = img2.darrays[0]
        assert_equal(node_arr.intent,
                     intent_codes.code['NIFTI_INTENT_NODE_INDEX'])
        assert_equal(node_arr.coordsys, None)
        assert_equal(node_arr.data.dtype, np.int32)
        assert_equal(node_arr.datatype, data_type_codes['int32'])

        first_data_array_pos = 1
        narrays = nsamples + 1
    else:
        first_data_array_pos = 0
        narrays = nsamples

    assert_equal(len(img.darrays), narrays)
    for i in xrange(nsamples):
        arr = img2.darrays[i + first_data_array_pos]

        # check intent code
        illegal_intents = ['NIFTI_INTENT_NODE_INDEX',
                           'NIFTI_INTENT_GENMATRIX',
                           'NIFTI_INTENT_POINTSET',
                           'NIFTI_INTENT_TRIANGLE']
        assert (arr.intent not in [intent_codes.code[s]
                                   for s in illegal_intents])

        # although the GIFTI standard is not very clear about whether
        # arrays with other intent than NODE_INDEX can have a
        # GiftiCoordSystem, FreeSurfer's mris_convert
        # does not seem to like its presence. Thus we make sure that
        # it's not there.

        assert_equal(arr.coordsys, None)
        assert_equal(arr.data.dtype, np.float32)
        assert_equal(arr.datatype, data_type_codes['float32'])



    # another test for map2gifti, setting the encoding explicitly
    map2gifti(ds, fn, encoding=format_)
    ds5 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds5, expected_ds)

    # test map2gifti with array input; nodes are not stored
    map2gifti(ds.samples, fn)
    ds6 = gifti_dataset(fn)
    if include_nodes:
        assert_raises(AssertionError, assert_datasets_almost_equal,
                      ds6, expected_ds)
    else:
        assert_datasets_almost_equal(ds6, expected_ds)

    assert_raises(TypeError, gifti_dataset, ds3_sa)
    assert_raises(TypeError, map2gifti, img, fn)
Example #30
    data = json.load(f)
s_ids = list(json.loads(data))
## CONSTANTS ##
FILTERING = False

THRESHOLD_DPF = 1.2
if FILTERING:
    OUTPUT = '/neurospin/brainomics/2016_HCP/pheno_pits_closest_filtered'+str(THRESHOLD_DPF)+'/'
else:
    OUTPUT = '/neurospin/brainomics/2016_HCP/pheno_pits_closest_filtered/'


for side in sides:
    t0 = time.time()
    file_parcels_on_atlas = path0 +'pits_density/clusters_'+side+'_average_pits_smoothed0.7_60_dist15.0_area100.0_ridge2.0.gii'
    array_parcels = gio.read(file_parcels_on_atlas).darrays[0].data
    parcels_org = np.unique(array_parcels)
    NB_PARCELS = len(parcels_org)
    DATA_DPF = np.zeros((len(s_ids), NB_PARCELS))*np.nan
    filename_average = '/neurospin/imagen/workspace/cati/BVdatabase/average_'+side+'mesh_BL.gii'
    mesh_average = gio.read(filename_average)
    vertices_origin = mesh_average.darrays[0].data
    triangles_origin = mesh_average.darrays[1].data

    for k,parcel in enumerate(parcels_org):
        t = time.time()
        X = np.array([])
        for j, s_id in enumerate(s_ids):
            file_pits_on_atlas = os.path.join(path, s_id, "t1mri", "BL",
                                              "default_analysis", "segmentation", "mesh",
                                              "surface_analysis", ""+s_id+"_"+side+"white_pits_on_atlas.gii")
Example #31
    '/neurospin/imagen/src/scripts/psc_tools/psc2_centre.csv', delimiter=',')
all_centres_subjects = {}
for j in range(len(psc2_centre)):
    label = str(int(psc2_centre[j][0]))
    for i in range(12 - len(label)):
        label = '0' + label
    all_centres_subjects[label] = centres_number[str(int(psc2_centre[j][1]))]

city = all_centres_subjects[s_id]
subject_folder = "/neurospin/imagen/workspace/cati/BVdatabase/" + city + "/" + s_id + "/"

file_pits = os.path.join(subject_folder, "t1mri", "BL", "default_analysis",
                         "segmentation", "mesh", "surface_analysis",
                         "" + s_id + "_" + side + "white_pits.gii")

pits_data = gio.read(file_pits).darrays[0].data
pits_data = np.zeros(len(pits_data))
pits_data[index_pit] = 1

file_white_mesh = os.path.join(subject_folder, "t1mri", "BL",
                               "default_analysis", "segmentation", "mesh",
                               "" + s_id + "_" + side + "white.gii")
white_mesh = ana.loadObject(file_white_mesh)

### Convert to readable format for
from soma import aims
tex = aims.TimeTexture(dtype='FLOAT')
#tex[0].assign(lh_pval2)
tex[0].assign(pits_data)
tex_pit = ana.toAObject(tex)
tex_mesh_pit = ana.fusionObjects([white_mesh, tex_pit],
Example #32
intent = 'NIFTI_INTENT_NORMAL'  # needed by getArraysFromIntent below (cf. Example #33)

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--sub", type=str, required=True)
parser.add_argument("-d", "--dir", type=str, default="./")

args = parser.parse_args()

subject=args.sub
dir=args.dir
subdir=dir+"/"+subject+"/"

os.chdir(args.dir)
os.mkdir(subdir+"vertices")


print("\n")
print ("Current Directory:")
print(os.getcwd() + "\n")
print ("Current Subject:")
print (subject)

for hemi in ['L', 'R']:

    myelin_file=subdir+'MNINonLinear/Native/'+subject+"."+hemi+".MyelinMap.native.func.gii"
    out_fname=subdir+"vertices/"+hemi+'_vertices'
    myelin_map=gio.read(myelin_file)
    myelin_vector=myelin_map.getArraysFromIntent(intent)[0].data
    zero_myelin=np.where(myelin_vector == 0)[0]
    nonzero_myelin=np.where(myelin_vector > 0)[0]
    np.savetxt(out_fname, nonzero_myelin+1,fmt='%d')

Example #33
intent = 'NIFTI_INTENT_NORMAL'

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--sub", type=str, required=True)
parser.add_argument("-d", "--dir", type=str, default="./")

args = parser.parse_args()

subject = args.sub
dir = args.dir
subdir = dir + "/" + subject + "/"

os.chdir(args.dir)
os.mkdir(subdir + "vertices")

print("\n")
print("Current Directory:")
print(os.getcwd() + "\n")
print("Current Subject:")
print(subject)

for hemi in ['L', 'R']:

    myelin_file = subdir + 'MNINonLinear/Native/' + subject + "." + hemi + ".MyelinMap.native.func.gii"
    out_fname = subdir + "vertices/" + hemi + '_vertices'
    myelin_map = gio.read(myelin_file)
    myelin_vector = myelin_map.getArraysFromIntent(intent)[0].data
    zero_myelin = np.where(myelin_vector == 0)[0]
    nonzero_myelin = np.where(myelin_vector > 0)[0]
    np.savetxt(out_fname, nonzero_myelin + 1, fmt='%d')
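
On nibabel 3.0 and later the same per-intent extraction can be written with nibabel.load and GiftiImage.agg_data; a minimal sketch, reusing the myelin_file and out_fname paths from above:

import numpy as np
import nibabel as nib

img = nib.load(myelin_file)                          # GiftiImage
myelin_vector = img.agg_data('NIFTI_INTENT_NORMAL')  # data for that intent
nonzero_myelin = np.where(myelin_vector > 0)[0]
np.savetxt(out_fname, nonzero_myelin + 1, fmt='%d')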
Example #34
def get_parcel_info():
    # load parcellation
    lhfile = os.path.join(basedir,
                          'all_selected_L_new_parcel_renumbered.func.gii')
    rhfile = os.path.join(basedir,
                          'all_selected_R_new_parcel_renumbered.func.gii')

    lh = giftiio.read(lhfile)
    rh = giftiio.read(rhfile)

    lhparcvals = numpy.unique(lh.darrays[0].data)[1:]
    rhparcvals = numpy.unique(rh.darrays[0].data)[1:]

    lhinfomapfile = os.path.join(basedir, 'parcel_L_consensus_new.func.gii')
    rhinfomapfile = os.path.join(basedir, 'parcel_R_consensus_new.func.gii')

    lhinfomap = nibabel.gifti.giftiio.read(lhinfomapfile)
    rhinfomap = nibabel.gifti.giftiio.read(rhinfomapfile)

    # open community files
    communities_orig = numpy.loadtxt(
        os.path.join(
            basedir,
            'parcel_infomap_assigns_startpos50_neworder_communities.txt'))
    communities_orig_idx = numpy.loadtxt(
        os.path.join(
            basedir,
            'parcel_infomap_assigns_startpos50_neworder_consensus_ind.txt'))

    lhsurffile = os.path.join(fsdir,
                              'sub013.L.midthickness.32k_fs_LR.surf.gii')
    lhsurf = giftiio.read(lhsurffile)
    rhsurffile = os.path.join(fsdir,
                              'sub013.R.midthickness.32k_fs_LR.surf.gii')
    rhsurf = giftiio.read(rhsurffile)

    # get mean XYZ location and central vertex of each parcel
    lhcoords = numpy.zeros((len(lhparcvals), 3))
    lhcentvertex = numpy.zeros(len(lhparcvals))

    for i in range(len(lhparcvals)):
        v = lhparcvals[i]
        vertices = numpy.where(lh.darrays[0].data == v)[0]
        lhcoords[i, :] = numpy.mean(lhsurf.darrays[0].data[vertices, :], 0)
        vdist = numpy.zeros(len(vertices))
        for j in range(len(vertices)):
            vdist[j] = numpy.linalg.norm(lhsurf.darrays[0].data[j, :] -
                                         lhcoords[i, :])
        lhcentvertex[i] = vertices[vdist == numpy.min(vdist)]

    rhcoords = numpy.zeros((len(rhparcvals), 3))
    rhcentvertex = numpy.zeros(len(rhparcvals))

    for i in range(len(rhparcvals)):
        v = rhparcvals[i]
        vertices = numpy.where(rh.darrays[0].data == v)[0]
        rhcoords[i, :] = numpy.mean(rhsurf.darrays[0].data[vertices, :], 0)
        vdist = numpy.zeros(len(vertices))
        for j in range(len(vertices)):
            vdist[j] = numpy.linalg.norm(rhsurf.darrays[0].data[j, :] -
                                         rhcoords[i, :])
        rhcentvertex[i] = vertices[vdist == numpy.min(vdist)]

    # get labels for each parcel

    llabelfile = os.path.join(fsdir, 'sub013.L.aparc.32k_fs_LR.label.gii')
    rlabelfile = os.path.join(fsdir, 'sub013.R.aparc.32k_fs_LR.label.gii')

    lparc = giftiio.read(llabelfile)
    rparc = giftiio.read(rlabelfile)

    lhparclabels = []
    rhparclabels = []

    for i in range(len(lhcentvertex)):
        l = lparc.darrays[0].data[lhcentvertex[i]]
        lhparclabels.append(lparc.labeltable.labels[l].label)

    for i in range(len(rhcentvertex)):
        l = rparc.darrays[0].data[rhcentvertex[i]]
        rhparclabels.append(rparc.labeltable.labels[l].label)

    # get RSN labels from HCP tutorial data
    l_rsnlabelfile = os.path.join(tutorialdir,
                                  'RSN-networks.L.32k_fs_LR.label.gii')
    r_rsnlabelfile = os.path.join(tutorialdir,
                                  'RSN-networks.R.32k_fs_LR.label.gii')

    l_rsn = giftiio.read(l_rsnlabelfile)
    r_rsn = giftiio.read(r_rsnlabelfile)

    f = open(os.path.join(basedir, 'module_names.txt'))
    power_network_names = {}
    for l in f.readlines():
        l_s = l.strip().split('\t')
        power_network_names[float(l_s[0])] = l_s[1]
    f.close()

    l_rsnlabels = []
    r_rsnlabels = []
    l_rsnlabels_yeo7 = []
    r_rsnlabels_yeo7 = []
    l_rsnlabels_yeo17 = []
    r_rsnlabels_yeo17 = []

    for i in range(len(lhcentvertex)):
        l = lhinfomap.darrays[0].data[lhcentvertex[i]]
        l_rsnlabels.append(power_network_names[l])
        l = l_rsn.darrays[0].data[lhcentvertex[i]]
        l_rsnlabels_yeo7.append(l_rsn.labeltable.labels[l].label)
        l = l_rsn.darrays[1].data[lhcentvertex[i]]
        l_rsnlabels_yeo17.append(l_rsn.labeltable.labels[l].label)

    for i in range(len(rhcentvertex)):
        l = rhinfomap.darrays[0].data[rhcentvertex[i]]
        r_rsnlabels.append(power_network_names[l])
        l = r_rsn.darrays[0].data[rhcentvertex[i]]
        r_rsnlabels_yeo7.append(r_rsn.labeltable.labels[l].label)
        l = r_rsn.darrays[1].data[rhcentvertex[i]]
        r_rsnlabels_yeo17.append(r_rsn.labeltable.labels[l].label)

    # get lobe

    atlasfile = os.path.join(fsldir,
                             'data/atlases/MNI/MNI-maxprob-thr0-2mm.nii.gz')

    atlas = nibabel.load(atlasfile)
    atlasdata = atlas.get_data()
    lobenames = [
        'None', 'Caudate', 'Cerebellum', 'Frontal', 'Insula', 'Occipital',
        'Parietal', 'Putamen', 'Temporal', 'Thalamus'
    ]

    def mni2vox(mni, qform):
        tmp = numpy.linalg.inv(qform) * numpy.matrix(
            [mni[0], mni[1], mni[2], 1]).T
        return numpy.array(numpy.round(tmp.T[0, 0:3]))[0].astype(int)
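    # Example: mni2vox([0.0, 0.0, 0.0], atlas.get_qform()) gives the voxel
    # indices of the MNI origin in the 2 mm atlas loaded above.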

    l_lobe = []
    r_lobe = []

    for i in range(len(lhcentvertex)):
        vox = mni2vox(lhcoords[i, :], atlas.get_qform())
        lobe = atlasdata[vox[0], vox[1], vox[2]]
        l_lobe.append(lobenames[lobe])
    for i in range(len(rhcentvertex)):
        vox = mni2vox(rhcoords[i, :], atlas.get_qform())
        lobe = atlasdata[vox[0], vox[1], vox[2]]
        r_lobe.append(lobenames[lobe])

    use_aseg = True

    def vox2mni(vox, qform):
        vox = numpy.array([vox[0], vox[1], vox[2], 1])
        return qform.dot(vox)[:3]

    if use_aseg:
        # now get the subcortical data from the aparc+aseg segmentation
        f = open(os.path.join(stdir, 'aseg/aseg_fields.txt'))
        asegnames = {}
        for l in f.readlines():
            l_s = l.strip().split()
            asegnames[int(l_s[0])] = l_s[1]
        f.close()
        asegkeys = asegnames.keys()
        asegkeys.sort()
        asegimg = nibabel.load(
            os.path.join(stdir, 'aseg/aparc+aseg_reg2mni.nii.gz'))
        asegdata = asegimg.get_data()

        meancoords = {}
        for k in asegkeys:
            mni = []
            v = numpy.where(asegdata == k)
            nvox = len(v[0])
            for i in range(nvox):
                mni.append(
                    vox2mni([v[0][i], v[1][i], v[2][i]], asegimg.get_qform()))
            mni = numpy.matrix(mni)
            meancoords[k] = numpy.mean(mni, 0)

    f = open(os.path.join(basedir, 'parcel_data.txt'), 'w')
    ctr = 1
    for i in range(len(lhcentvertex)):
        f.write('%d\tL\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n' %
                (ctr, lhcoords[i, 0], lhcoords[i, 1], lhcoords[i, 2],
                 l_lobe[i], lhparclabels[i], l_rsnlabels[i],
                 l_rsnlabels_yeo7[i], l_rsnlabels_yeo17[i]))
        ctr += 1

    for i in range(len(rhcentvertex)):
        f.write('%d\tR\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n' %
                (ctr, rhcoords[i, 0], rhcoords[i, 1], rhcoords[i, 2],
                 r_lobe[i], rhparclabels[i], r_rsnlabels[i],
                 r_rsnlabels_yeo7[i], r_rsnlabels_yeo17[i]))
        ctr += 1

    if use_aseg:
        for k in asegkeys:
            if meancoords[k][0, 0] < 0:
                hemis = 'L'
            else:
                hemis = 'R'
            f.write('%d\t%s\t%0.2f\t%0.2f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n' %
                    (ctr, hemis, meancoords[k][0, 0], meancoords[k][0, 1],
                     meancoords[k][0, 2], 'subcortical', asegnames[k], 'na',
                     'na', 'na'))
            ctr += 1

    f.close()
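# The parcel_data.txt file written above is tab-separated with one row per
# parcel: index, hemisphere, x, y, z, lobe, anatomical label, Power RSN,
# Yeo-7 RSN and Yeo-17 RSN. A minimal sketch for loading it back with
# pandas (the column names are illustrative, not from the source):
import pandas
parcel_df = pandas.read_csv('parcel_data.txt', sep='\t', header=None,
                            names=['index', 'hemis', 'x', 'y', 'z', 'lobe',
                                   'anat_label', 'power_rsn', 'yeo7',
                                   'yeo17'])
print parcel_df['power_rsn'].value_counts()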
Example #35
def map_shared_genetic(template, sd, areals, indir, cb, interval_cb, rho,
                       trait, pval_thresh, outdir):
    """
    Parameters
    template: path to template mesh file
    areals: path to gii file containing the texture of areals
    sd: hemisphere side from which SOLAR estimates are displayed
    indir: directory containing h2 and pval dictionaries
    cb: colorbar names must be an array with [cb_h2, cb_pval]
    interval_cb: number boundaries for the colobar display
                 must be a double array [inter_h2[0,1], inter_pval[0,1]]
    pval_thresh: p-value threshold
    outdir: output directory for the snapshots
    """
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    # Define the quaternions that will be used for the snapshots
    if (sd == "L"):
        view_quaternions = {
            'intern': [0.5, 0.5, 0.5, 0.5],
            'extern': [0.5, -0.5, -0.5, 0.5],
            'bottom': [1, 0, 0, 0],
            'top': [0, 0, 1, 0]
        }
    else:
        view_quaternions = {
            'intern': [0.5, -0.5, -0.5, 0.5],
            'extern': [0.5, 0.5, 0.5, 0.5],
            'bottom': [1, 0, 0, 0],
            'top': [0, 0, 1, 0]
        }

    # Load h2 and pval dictionaries
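    # Each dictionary was serialized twice (a json.dumps string written with
    # json.dump), hence the json.load followed by json.loads below.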
    path_h2 = os.path.join(indir, sd + rho + '_dict.json')
    with open(path_h2, 'r') as f:
        data = json.load(f)
    dict_h2 = json.loads(data)
    path_pval = os.path.join(indir, sd + rho + '_pval_dict.json')
    with open(path_pval, 'r') as f:
        data = json.load(f)
    dict_pval = json.loads(data)

    # Load the phenotypic correlation (scipy_rhop) and pval dictionaries
    rho2 = 'scipy_rhop'
    path_h2 = os.path.join(indir, sd + rho2 + '_dict.json')
    with open(path_h2, 'r') as f:
        data = json.load(f)
    dict_rhop = json.loads(data)
    path_pval = os.path.join(indir, sd + rho2 + '_pval_dict.json')
    with open(path_pval, 'r') as f:
        data = json.load(f)
    dict_pval_rhop = json.loads(data)

    # Areal value in each vertex
    array_areals = gio.read(areals).darrays[0].data
    # Create arrays that will contain h2 and pval estimates on each vertex
    array_h2 = np.zeros(len(array_areals))
    array_pval = np.zeros(len(array_areals))
    list_areals = np.unique(array_areals)

    # Load the mesh in anatomist
    mesh = ana.loadObject(template)

    for areal in list_areals:
        # Make sure the areal key has the right format
        areal = str(int(areal))
        if dict_h2.has_key(areal + '_' + col):
            # Finally, check if the p-value passes the threshold
            if (dict_pval[areal + '_' + col] < pval_thresh and
                    # and force the phenotypic correlation to be significant
                    # (Bonferroni: 5% over 2 hemispheres x 180 areals)
                    dict_pval_rhop[areal + '_' + col] < 5e-2 / (2 * 180)):
                # Select the index of vertices belonging to the areal
                ind = np.where(array_areals == float(areal))[0]
                # Give them the h2 estimate of this areal
                if 'rhog' in rho:  # Change this line for Figure 3
                    array_h2[ind] = abs(dict_h2[areal + '_' + col])
                else:
                    array_h2[ind] = dict_h2[areal + '_' + col]
                # And the log10 pvalue
                array_pval[ind] = -np.log10(dict_pval[areal + '_' + col])

    ### Block performing the display of h2 estimates ###
    # Create anatomist window
    window = ana.createWindow('3D', geometry=[0, 0, 584, 584])
    tex = aims.TimeTexture(dtype='FLOAT')
    tex[0].assign(array_h2)
    pits_tex = ana.toAObject(tex)
    tex_mesh = ana.fusionObjects([mesh, pits_tex],
                                 method='FusionTexSurfMethod')
    tex_mesh.assignReferential(ref)
    tex_mesh.setMaterial(front_face='counterclockwise')
    pits_tex.setPalette(cb[0],
                        minVal=interval_cb[0][0],
                        maxVal=interval_cb[0][1],
                        absoluteMode=True)
    updateWindow(window, tex_mesh)
    ana.execute('TexturingParams', objects=[tex_mesh], interpolation='rgb')

    # Loop through the quaternions and do the snapshot
    for vw in view_quaternions.keys():
        q = aims.Quaternion(view_quaternions[vw])
        window.camera(view_quaternion=view_quaternions[vw], zoom=0.65)
        # Snapshot file output
        output = os.path.join(
            outdir, '_'.join(['snapshot', trait, rho, sd, vw + '.png']))
        ana.execute('WindowConfig', windows=[window], snapshot=output)

    if False:
        ana.releaseObject(pits_tex)
        pits_tex = None
        ana.releaseObject(tex_mesh)
        tex_mesh = None

    ### Block performing the display of p-value estimates ###
    # Create a second anatomist window (otherwise the zoom is wrong in the
    # snapshots)
    window = ana.createWindow('3D', geometry=[0, 0, 584, 584])
    tex = aims.TimeTexture(dtype='FLOAT')
    tex[0].assign(array_pval)
    pits_tex = ana.toAObject(tex)
    tex_mesh = ana.fusionObjects([mesh, pits_tex],
                                 method='FusionTexSurfMethod')
    tex_mesh.assignReferential(ref)
    tex_mesh.setMaterial(front_face='counterclockwise')
    pits_tex.setPalette(cb[1],
                        minVal=interval_cb[1][0],
                        maxVal=interval_cb[1][1],
                        absoluteMode=True)
    updateWindow(window, tex_mesh)
    ana.execute('TexturingParams', objects=[tex_mesh], interpolation='rgb')
    # Loop through the quaternions and do the snapshot
    for vw in view_quaternions.keys():
        q = aims.Quaternion(view_quaternions[vw])
        window.camera(view_quaternion=view_quaternions[vw], zoom=0.65)
        # Snapshot file output
        output = os.path.join(
            outdir, '_'.join(['snapshot', trait, rho, 'pval', sd,
                              vw + '.png']))
        ana.execute('WindowConfig', windows=[window], snapshot=output)

    if False:
        ana.releaseObject(pits_tex)
        pits_tex = None
        ana.releaseObject(tex_mesh)
        tex_mesh = None
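# A hypothetical invocation (paths, colorbar names and values are
# placeholders, not from the source). The function also relies on
# module-level objects: an anatomist instance `ana`, a referential `ref`,
# a key suffix `col` and the `updateWindow` helper.
map_shared_genetic(template='lh.inflated.white.gii',
                   sd='L',
                   areals='L.areals.gii',
                   indir='/path/to/solar_dicts',
                   cb=['h2_palette', 'pval_palette'],
                   interval_cb=[[0, 1], [0, 10]],
                   rho='scipy_rhog',
                   trait='DPF',
                   pval_thresh=5e-2 / (2 * 180),
                   outdir='/path/to/snapshots')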
Example #36
def display_folds(folds_file,
                  labels,
                  weights,
                  white_file=None,
                  interactive=True,
                  snap=False,
                  animate=False,
                  outdir=None,
                  name="folds",
                  actor_ang=(0., 0., 0.)):
    """ Display the folds computed by morphologist.

    The scene supports one feature activated via the keystroke:

    * 'p': Pick the data at the current mouse point. This will pop up a window
      with information on the current pick (i.e. the fold name).

    Parameters
    ----------
    folds_file: str (mandatory)
        the folds '.gii' file.
    labels: dict (mandatory)
        a mapping between a mesh id and its label.
    weights: dict (mandatory)
        a mapping between a mesh label and its weight in [0, 1].
    white_file: str (optional, default None)
        if specified, the white surface will be displayed.
    interactive: bool (optional, default True)
        if True, display the renderer.
    snap: bool (optional, default False)
        if True, create a snapshot of the scene: needs a valid outdir.
    animate: bool (optional, default False)
        if True, create a 360-degree gif animation of the scene: needs a
        valid outdir.
    outdir: str (optional, default None)
        an existing directory.
    name: str (optional, default 'folds')
        the basename of the generated files.
    actor_ang: 3-tuple (optional, default (0., 0., 0.))
        the actors' x, y, z rotation angles (in degrees).
    """
    # Load the folds file
    image = gio.read(folds_file)
    nb_of_surfs = len(image.darrays)
    if nb_of_surfs % 2 != 0:
        raise ValueError("Need an even number of arrays "
                         "(vertex, triangle pairs).")

    # Create an actor for each fold
    ren = pvtk.ren()
    ren.SetBackground(1, 1, 1)
    for vertindex in range(0, nb_of_surfs, 2):
        vertices = image.darrays[vertindex].data
        triangles = image.darrays[vertindex + 1].data
        labelindex = image.darrays[vertindex].get_metadata()["Timestep"]
        if labelindex != image.darrays[vertindex +
                                       1].get_metadata()["Timestep"]:
            raise ValueError("Gifti arrays '{0}' and '{1}' do not share the "
                             "same label.".format(vertindex, vertindex + 1))
        labelindex = int(labelindex)
        if labelindex in labels:
            label = labels[labelindex]
            if label in weights:
                weight = weights[label] * 256.
            else:
                weight = 0
        else:
            label = "NC"
            weight = 0
        surf = TriSurface(vertices, triangles, labels=None)
        actor = pvtk.surface(surf.vertices, surf.triangles,
                             surf.labels + weight)
        actor.label = label
        actor.RotateX(actor_ang[0])
        actor.RotateY(actor_ang[1])
        actor.RotateZ(actor_ang[2])
        pvtk.add(ren, actor)

    # Add the white surface if specified
    if white_file is not None:
        image = gio.read(white_file)
        nb_of_surfs = len(image.darrays)
        if nb_of_surfs != 2:
            raise ValueError("'{0}' does not contain a valid white "
                             "mesh.".format(white_file))
        vertices = image.darrays[0].data
        triangles = image.darrays[1].data
        surf = TriSurface(vertices, triangles, labels=None)
        actor = pvtk.surface(surf.vertices,
                             surf.triangles,
                             surf.labels,
                             opacity=1,
                             set_lut=False)
        actor.label = "white"
        actor.RotateX(actor_ang[0])
        actor.RotateY(actor_ang[1])
        actor.RotateZ(actor_ang[2])
        pvtk.add(ren, actor)

    # Show the renderer
    if interactive:
        actor = pvtk.text("!!!!",
                          font_size=15,
                          position=(10, 10),
                          is_visible=False)
        pvtk.add(ren, actor)
        obs = LabelsOnPick(actor,
                           static_position=True,
                           to_keep_actors=["white"])
        pvtk.show(ren, title="morphologist folds", observers=[obs])

    # Create a snap
    if snap:
        if not os.path.isdir(outdir):
            raise ValueError("'{0}' is not a valid directory.".format(outdir))
        pvtk.record(ren, outdir, name, n_frames=1)

    # Create an animation
    if animate:
        if not os.path.isdir(outdir):
            raise ValueError("'{0}' is not a valid directory.".format(outdir))
        pvtk.record(ren,
                    outdir,
                    name,
                    n_frames=36,
                    az_ang=10,
                    animate=True,
                    delay=25)
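# A hypothetical call with placeholder paths; the labels keys must match the
# 'Timestep' metadata of the gifti arrays, and outdir must exist when snap
# or animate is True:
display_folds('morphologist/folds.gii',
              labels={0: 'S.C._left', 1: 'F.C.M._left'},
              weights={'S.C._left': 0.8, 'F.C.M._left': 0.2},
              white_file='morphologist/white.gii',
              interactive=False,
              snap=True,
              outdir='/tmp/folds_snapshots')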
Example #37
pits_data_R = np.array([])
pits_data_L = np.array([])
count_R = 0
count_L = 0
total_density = np.array([])
for s_id in s_ids:
    path_s = path + s_id + '/'
    if "imagen" in path and "Freesurfer" not in path:
        s_id = s_id[-12:]
    file_pits_R = path_s + 't1mri/BL/default_analysis/segmentation/mesh/surface_analysis_sym/' + s_id + '_Rwhite_pits_smoothed0.7_60_on_atlas.gii'
    file_pits_L = path_s + 't1mri/BL/default_analysis/segmentation/mesh/surface_analysis_sym/' + s_id + '_Lwhite_pits_smoothed0.7_60_on_atlas.gii'
    if os.path.isfile(file_pits_R):
        count_R += 1
        if pits_data_R.size == 0:
            pits_data_R = gio.read(file_pits_R).darrays[0].data
        else:
            pits_data_R += gio.read(file_pits_R).darrays[0].data
    if os.path.isfile(file_pits_L):
        count_L += 1
        if pits_data_L.size == 0:
            pits_data_L = gio.read(file_pits_L).darrays[0].data
        else:
            pits_data_L += gio.read(file_pits_L).darrays[0].data

pits_data_R_temp = pits_data_R / count_R
pits_data_L_temp = pits_data_L / count_L

total_density = (pits_data_R_temp + pits_data_L_temp) / 2
"""g_R = gio.read(file_pits_R)
g_L = gio.read(file_pits_L)
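# A minimal sketch for inspecting the group density map computed above;
# the 0.05 cutoff is illustrative only:
high_density = np.where(total_density > 0.05)[0]
print "%d vertices with average pit density above 0.05" % len(high_density)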
Example #38
    with open(temp_file_s_ids, 'r') as f:
        data = json.load(f)
    s_ids = list(json.loads(data))

    ### OUTPUT ###
    if SYMMETRIC:
        OUTPUT = '/neurospin/brainomics/2016_HCP/new_pheno_threshold_' + feature_threshold + '/pheno_pits_sym_' + feature + '_' + database_parcel + '_Freesurfer_new/'
    else:
        OUTPUT = '/neurospin/brainomics/2016_HCP/new_pheno_threshold_' + feature_threshold + '/pheno_pits_' + feature + '_' + database_parcel + '_Freesurfer_new/'

    for side in sides:
        if SYMMETRIC:
            file_parcels_on_atlas = path_parcels + 'pits_density_update/clusters_total_average_pits_smoothed0.7_60_sym_dist15.0_area100.0_ridge2.0.gii'
        else:
            file_parcels_on_atlas = path_parcels + 'pits_density_update/clusters_' + side + '_average_pits_smoothed0.7_60_dist15.0_area100.0_ridge2.0.gii'
        array_parcels = gio.read(file_parcels_on_atlas).darrays[0].data
        parcels_org = np.unique(array_parcels)
        parcels_org = parcels_org[1:]
        NB_PARCELS = len(parcels_org)
        DATA_DPF = np.zeros((len(s_ids), NB_PARCELS)) * np.nan
        INPUT_side = INPUT + side + '/'
        thresholds = np.loadtxt(INPUT_side + 'thresholds' + side + '.txt')
        for j, s_id in enumerate(s_ids):
            if SYMMETRIC:
                file_pits_on_atlas = os.path.join(
                    path, s_id, "t1mri", "BL", "default_analysis",
                    "segmentation", "mesh", "surface_analysis_sym",
                    s_id + "_" + side + "white_pits_on_atlas.gii")
                file_DPF_on_atlas = os.path.join(
                    path,
                    s_id,
Example #39
 if "Freesurfer" in path_parcels:
     if "sym" in path_parcels:
         template_mesh = '/neurospin/brainomics/folder_gii/' + sid.lower(
         ) + 'h.inflated.white.gii'
     else:
         template_mesh = '/neurospin/brainomics/folder_gii/' + sid.lower(
         ) + 'h.inflated.white.gii'
 else:
     template_mesh = '/neurospin/imagen/workspace/cati/templates/average_' + sid + 'mesh_BL.gii'
 meshes[side] = ana.loadObject(template_mesh)
 windows[side] = ana.createWindow('3D', geometry=[0, 0, 584, 584])
 if SYMMETRIC:
     file_parcels_on_atlas = path_parcels + 'pits_density_update/clusters_total_average_pits_smoothed0.7_60_sym_dist15.0_area100.0_ridge2.0.gii'
 else:
     file_parcels_on_atlas = path_parcels + 'pits_density_update/clusters_' + side + '_average_pits_smoothed0.7_60_dist15.0_area100.0_ridge2.0.gii'
 array_parcels = gio.read(file_parcels_on_atlas).darrays[0].data
 array_h2 = np.zeros(len(array_parcels))
 parcels_org = np.unique(array_parcels)
 for parcel in parcels_org:
     if dict_h2[pheno].has_key(str(int(parcel))):
         ind = np.where(array_parcels == parcel)[0]
         array_h2[ind] = dict_h2[pheno][str(int(parcel))]
 tex = aims.TimeTexture(dtype='FLOAT')
 tex[0].assign(array_h2)
 pits_tex[side] = ana.toAObject(tex)
 tex_mesh[side] = ana.fusionObjects([meshes[side], pits_tex[side]],
                                    method='FusionTexSurfMethod')
 if "Freesurfer" in path_parcels:
     ref = ana.createReferential()
     tr = ana.createTransformation(
         [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, -1], ref,
Example #40
# @author: margulies

import scipy, h5py
from sklearn import manifold
import numpy as np
import nibabel.gifti.giftiio as gio

f = h5py.File(
    '/scr/murg2/HCP_new/HCP_Q1-Q6_GroupAvg_Related440_Unrelated100_v1/HCP_Q1-Q6_R468_rfMRI_groupAvg_left_corr_smoothed2_toR_nifti.mat',
    'r')
dataCorr = f.get('dataCorr')
dataCorr = np.array(dataCorr)

gii = gio.read(
    '/scr/murg2/HCP_new/HCP_Q1-Q6_GroupAvg_Related440_Unrelated100_v1/lh.cortex.gii'
)
structure = gii.darrays[0].data
cortex = np.where(structure != 1)[0]

d = []
for i in cortex:
    d.append(dataCorr[i, cortex])
data = np.array(d)

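# Correlations lie in [-1, 1]; the affine map below turns them into
# dissimilarities in [0, 1], with 0 for perfectly correlated vertices,
# as required by the precomputed-metric t-SNE.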
K = 1 - ((data + 1) / 2.)
tsne = manifold.TSNE(n_components=3, metric='precomputed')
X_tsne = tsne.fit_transform(K)

results = ([0] * 32492)
count = 0
Example #41
def create_phenotype(database,
                     s_ids,
                     array_areals,
                     areal_list,
                     side,
                     indir,
                     outdir,
                     sd_template='lh'):
    """ 
    Parameters

    database: path of Morphologist database
    array_areals: array containing the areals information for each vertex
    areal_list: contain the areal numbers
    s_ids: list of s_ids to consider
    side: either R or L
    indir: directory containing the thresholds table
    outdir: output directory
    sd_template: side of the template
    """
    if not os.path.isdir(outdir):
        os.makedirs(outdir)

    outdir_cc = os.path.join(outdir, 'case_control')
    if not os.path.isdir(outdir_cc):
        os.makedirs(outdir_cc)

    INPUT = os.path.join(indir, side)
    if THRESHOLDING:
        # One DPF threshold per areal, in the order of areal_list
        # (the filename convention is assumed to match the script that
        # generated the thresholds table)
        thr_file = os.path.join(INPUT, 'thresholds' + side + '.txt')
        thresholds = np.loadtxt(thr_file)

    NB_AREALS = len(areal_list)
    # Matrix containing the retained pit (if any) DPF value for each subject
    DATA_DPF = np.zeros((len(s_ids), NB_AREALS)) * np.nan

    for j, s_id in enumerate(s_ids):
        print "Currently processing s_id " + str(s_id)
        file_pits = os.path.join(database, s_id, "t1mri", "BL",
                                 "default_analysis", "segmentation", "mesh",
                                 "surface_analysis_" + sd_template,
                                 s_id + "_" + side + "white_pits_on_atlas.gii")
        file_DPF = os.path.join(database, s_id, "t1mri", "BL",
                                "default_analysis", "segmentation", "mesh",
                                "surface_analysis_" + sd_template,
                                s_id + "_" + side + "white_DPF_on_atlas.gii")
        array_pits = gio.read(file_pits).darrays[0].data
        array_DPF = gio.read(file_DPF).darrays[0].data

        # Remove the pits with a DPF below threshold
        if THRESHOLDING:
            for k, areal in enumerate(areal_list):
                ind = np.where(array_areals == areal)
                array_pits[ind] = (array_pits[ind] *
                                   (array_DPF[ind] > thresholds[k]))

        if False:
            # Locate the remaining pits
            index_pits = np.nonzero(array_pits)[0]
            # And their corresponding areals
            areals = array_areals[index_pits]
            for k, areal in enumerate(areal_list):
                ind = np.where(areals == areal)[0]
                # If the subject has a pit in this areal, we keep the deepest one
                if ind.size:
                    index_max_DPF = np.argmax(array_DPF[index_pits[ind]])
                    # MULTIPLY BY 20 BECAUSE
                    # SOLAR REQUIRES THIS TO ALLOW LARGE ENOUGH STD FOR ITS MODEL
                    # 20 is chosen to have an std sufficient in each areal
                    # It's somewhat arbitrary but doesn't affect the heritability
                    DATA_DPF[j, k] = array_DPF[index_pits[ind]][index_max_DPF]
        elif True:
            for k, areal in enumerate(areal_list):
                ind = np.where(array_areals == areal)[0]
                # If the areal contains vertices, keep the deepest (max DPF)
                if ind.size:
                    index_max_DPF = np.argmax(array_DPF[ind])
                    DATA_DPF[j, k] = array_DPF[ind][index_max_DPF]
    """
    Process the DATA_DPF matrix 3 steps:
    1st filter out areals with less than 50% subjects having a pit
    2nd create DPF quantitative phenotype file for each areal kept
    3rd create a case control phenotype stating if a subject has pit or not
    """
    # We do not consider subjects with pit DPF = 0,
    # because the pit DPF must be > 0.
    # Otherwise, find the zeros with numpy and replace them with a value
    # close to 0.
    DATA_DPF = np.nan_to_num(DATA_DPF)
    index_columns_kept = []
    # Identify the columns where more than 50% of subjects have a sulcal pit
    for j in range(DATA_DPF.shape[1]):
        if np.count_nonzero(DATA_DPF[:, j]) > DATA_DPF.shape[0] * 0.5:
            index_columns_kept.append(j)

    # For the columns kept create a phenotype file containing subjects
    # with at least one pit
    for index in index_columns_kept:
        num = str(int(areal_list[index]))
        df3 = pd.DataFrame()
        df3['FID'] = np.asarray(s_ids)[np.nonzero(DATA_DPF[:, index])].tolist()
        df3['IID'] = df3['FID']
        df3['Areal_' + num] = DATA_DPF[:, index][np.nonzero(DATA_DPF[:, index])]
        output = os.path.join(outdir, 'DPF_pit' + num + 'side' + side + '.csv')
        df3.to_csv(output, sep=' ', header=True, index=False)

    # Create a case control phenotype file, stating if a subject has a pit
    df = pd.DataFrame()
    for j in range(DATA_DPF.shape[1]):
        df['Areal_' + str(int(areal_list[j]))] = DATA_DPF[:, j]
    df[df != 0] = 2
    df[df == 0] = 1
    df['IID'] = np.asarray(s_ids)
    output_cc = os.path.join(outdir_cc, 'all_pits_side' + side + '.csv')
    df.to_csv(output_cc, sep=',', header=True, index=False)
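# A toy demonstration of the 50% filter and case-control recoding above,
# assuming numpy (np) and pandas (pd) are imported; the values are made up:
toy = np.array([[2.5, 0.0],
                [3.1, 0.0],
                [0.0, 1.7]])
kept = [j for j in range(toy.shape[1])
        if np.count_nonzero(toy[:, j]) > toy.shape[0] * 0.5]
print kept  # [0]: only the first areal has a pit in more than 50% of subjects
cc = pd.DataFrame(toy)
cc[cc != 0] = 2  # subjects with a pit are cases (2)
cc[cc == 0] = 1  # subjects without a pit are controls (1)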