Example #1
def test_load_surf_data_file_nii_gii():
    # test loading of fake data from gifti file
    filename_gii = tempfile.mktemp(suffix='.gii')
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray = gifti.GiftiDataArray(data=np.zeros((20, )))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray = gifti.GiftiDataArray.from_array(np.zeros((20, )),
                                                 intent='t test')
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, filename_gii)
    assert_array_equal(load_surf_data(filename_gii), np.zeros((20, )))
    os.remove(filename_gii)

    # test loading of data from empty gifti file
    filename_gii_empty = tempfile.mktemp(suffix='.gii')
    gii_empty = gifti.GiftiImage()
    gifti.write(gii_empty, filename_gii_empty)
    assert_raises_regex(ValueError,
                        'must contain at least one data array',
                        load_surf_data, filename_gii_empty)
    os.remove(filename_gii_empty)

    # test loading of fake data from nifti file
    filename_nii = tempfile.mktemp(suffix='.nii')
    filename_niigz = tempfile.mktemp(suffix='.nii.gz')
    nii = nb.Nifti1Image(np.zeros((20, )), affine=None)
    nb.save(nii, filename_nii)
    nb.save(nii, filename_niigz)
    assert_array_equal(load_surf_data(filename_nii), np.zeros((20, )))
    assert_array_equal(load_surf_data(filename_niigz), np.zeros((20, )))
    os.remove(filename_nii)
    os.remove(filename_niigz)
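A minimal round-trip sketch of what the test above exercises; it assumes a recent nibabel (where nb.save replaces the deprecated gifti.write) and nilearn for load_surf_data:

import os
import tempfile
import numpy as np
import nibabel as nb
from nibabel import gifti
from nilearn.surface import load_surf_data

# write a small 1D array into a GIFTI file, then load it back
fname = tempfile.mktemp(suffix='.gii')
gii = gifti.GiftiImage(
    darrays=[gifti.GiftiDataArray(data=np.zeros((20,), dtype=np.float32))])
nb.save(gii, fname)                 # gifti.write(gii, fname) on old nibabel
print(load_surf_data(fname).shape)  # -> (20,)
os.remove(fname)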
Example #2
def test_readwritedata():
    img = gi.read(DATA_FILE2)
    with InTemporaryDirectory():
        gi.write(img, "test.gii")
        img2 = gi.read("test.gii")
        assert_equal(img.numDA, img2.numDA)
        assert_array_almost_equal(img.darrays[0].data, img2.darrays[0].data)
Example #3
def project_z_map(in_file, subject, hemi, proj_frac, fsaverage_fwhm=3.):

    out_dir = path(in_file).dirname()
    out_base = path(in_file).basename()
    if out_base[-3:] == ".gz":
        out_base = out_base[:-3]
    if out_base[-4:] == ".nii":
        out_base = out_base[:-4]
    else:
        raise Exception("problem with infile name %s" % infile)

    out_file_name = out_dir / (out_base + "-projfrac-%1.2f-%s.gii"
                               % (proj_frac, hemi))
    print(out_file_name)
    out_file_fsaverage = out_dir / ("avg_%s-projfrac-%1.2f-%s.gii"
                                    % (out_base, proj_frac, hemi))
    print(out_file_fsaverage)
    out_data = project_volume_data(in_file, hemi, subject_id=subject,
                                   projmeth="frac", projarg=proj_frac,
                                   projsum="point")
    out_data_avg = project_volume_data(in_file, hemi, subject_id=subject,
                                       projmeth="frac", projarg=proj_frac,
                                       target_subject="fsaverage",
                                       smooth_fwhm=fsaverage_fwhm,
                                       projsum="point")

    gii_arr = gifti.GiftiDataArray.from_array(out_data, 0)
    out_gii = gifti.GiftiImage(darrays=[gii_arr])

    gii_arr_avg = gifti.GiftiDataArray.from_array(out_data_avg, 0)
    out_gii_avg = gifti.GiftiImage(darrays=[gii_arr_avg])

    gifti.write(out_gii, out_file_name)
    gifti.write(out_gii_avg, out_file_fsaverage)
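A hedged sketch of invoking project_z_map above; the z-map path and subject name are hypothetical, and the function relies on PySurfer's project_volume_data plus a configured FreeSurfer SUBJECTS_DIR:

# project a volumetric z-map onto both hemispheres at proj_frac=0.5
for hemi in ('lh', 'rh'):
    project_z_map('/tmp/zmap.nii.gz', subject='sub01', hemi=hemi,
                  proj_frac=0.5)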
Example #4
def test_readwritedata():
    img = gi.read(DATA_FILE2)
    newp = pjoin(tempfile.gettempdir(), "test.gii")
    gi.write(img, newp)
    img2 = gi.read(newp)

    assert_equal(img.numDA, img2.numDA)
    assert_array_almost_equal(img.darrays[0].data, img2.darrays[0].data)
Example #5
def write_mesh(cor, tri, filename):
    from nibabel import gifti
    from nibabel.gifti import GiftiImage, GiftiDataArray
    nimg = GiftiImage()
    intent = 'NIFTI_INTENT_POINTSET'
    nimg.add_gifti_data_array(GiftiDataArray.from_array(cor, intent))
    intent = 'NIFTI_INTENT_TRIANGLE'
    nimg.add_gifti_data_array(GiftiDataArray.from_array(tri, intent))
    gifti.write(nimg, filename)
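A minimal usage sketch for write_mesh above, saving a toy single-triangle mesh (the output path is arbitrary):

import numpy as np

# three vertices and one triangular face
cor = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]], dtype=np.float32)
tri = np.array([[0, 1, 2]], dtype=np.int32)
write_mesh(cor, tri, '/tmp/one_triangle.gii')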
Example #6
def mesh_from_arrays(coord, triangles, path=None):
    """ Create a mesh object from two arrays

    fixme:  intent should be set !
    """
    carray = GiftiDataArray().from_array(coord.astype(np.float32),
                                         "NIFTI_INTENT_POINTSET")
    tarray = GiftiDataArray().from_array(triangles, "NIFTI_INTENT_TRIANGLE")
    img = GiftiImage(darrays=[carray, tarray])
    if path is not None:
        write(img, path)
    return img
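For comparison, mesh_from_arrays can build the same kind of mesh purely in memory; a sketch (with path=None, nothing is written to disk):

import numpy as np

coord = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.]], dtype=np.float32)
triangles = np.array([[0, 1, 2]], dtype=np.int32)
img = mesh_from_arrays(coord, triangles)  # in-memory GiftiImage
print(len(img.darrays))                   # -> 2 (pointset + triangle arrays)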
Example #7
def write_texture(tex_data, filename, intent=None, meta_data=None):
    """ Write the n-dimensional numpy array 'tex_data' into filename
    using gifti (.gii) or tex format.
    """
    if tex_data.dtype == bool:
        tex_data = tex_data.astype(np.int16)

    if has_ext_gzsafe(filename, 'gii'):
        if intent is None:
            if np.issubdtype(tex_data.dtype, np.integer):
                intent = 'NIFTI_INTENT_LABEL'
                dtype = None
            elif np.issubdtype(tex_data.dtype, np.floating):
                intent = 'NIFTI_INTENT_NONE'
                tex_data = tex_data.astype(np.float32)
                if pyhrf.cfg['global']['write_texture_minf']:
                    s = "attributes = {'data_type' : 'FLOAT'}"
                    with open(filename + '.minf', 'w') as f:
                        f.write(s)
            else:
                raise NotImplementedError("Unsupported dtype %s"
                                          % str(tex_data.dtype))

        gii_array = gifti.GiftiDataArray.from_array(tex_data, intent)
        if meta_data is not None:
            md = {'pyhrf_cuboid_data': meta_data}
            gmeta_data = gifti.GiftiMetaData.from_dict(md)
        else:
            gmeta_data = None
        tex_gii = gifti.GiftiImage(darrays=[gii_array], meta=gmeta_data)
        pyhrf.verbose(3, 'Write texture to %s' % filename)
        gifti.write(tex_gii, filename)

    elif has_ext_gzsafe(filename, 'tex'):
        if meta_data is not None:
            print('Warning: meta ignored when saving to tex format')

        from pyhrf.tools.io.tio import Texture
        tex = Texture(filename, data=tex_data)
        tex.write()
    else:
        raise NotImplementedError('Unsupported extension for file %s'
                                  % filename)
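A hedged usage sketch for write_texture above: with intent=None the intent is inferred from the dtype, so integer data is written as labels and float data as plain values (paths are hypothetical; pyhrf's configuration is assumed to be in place):

import numpy as np

labels = np.array([0, 1, 1, 2], dtype=np.int32)
write_texture(labels, '/tmp/labels.gii')    # -> NIFTI_INTENT_LABEL
values = np.random.randn(4).astype(np.float32)
write_texture(values, '/tmp/values.gii')    # -> NIFTI_INTENT_NONE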
Example #8
def extract_sub_mesh_with_files(input_mesh, center_node, radius, 
                                output_mesh=None):
    from nibabel import gifti
    from nibabel.gifti import GiftiImage, GiftiDataArray
    from pyhrf.tools.io import read_mesh
    cor, tri, coord_sys = read_mesh(input_mesh)
    sub_cor, sub_tri = extract_sub_mesh(cor, tri, center_node, radius)
    
    nimg = GiftiImage()
    intent = 'NIFTI_INTENT_POINTSET'
    nimg.add_gifti_data_array(GiftiDataArray.from_array(sub_cor, intent))
    intent = 'NIFTI_INTENT_TRIANGLE'
    nimg.add_gifti_data_array(GiftiDataArray.from_array(sub_tri, intent))

    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, '_sub'))
    pyhrf.verbose(1, 'Saving extracted mesh to %s' % output_mesh)
    gifti.write(nimg, output_mesh)
    return sub_cor, sub_tri, coord_sys
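A sketch of how extract_sub_mesh_with_files above might be called; the input mesh path, center vertex, and radius are hypothetical:

# extract the sub-mesh around vertex 1000 and save it next to the input
sub_cor, sub_tri, coord_sys = extract_sub_mesh_with_files(
    '/tmp/lh_white.gii', center_node=1000, radius=10)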
Example #9
def test_load_surf_data_file_glob():

    data2D = np.ones((20, 3))
    fnames = []
    for f in range(3):
        fnames.append(tempfile.mktemp(prefix='glob_%s_' % f, suffix='.gii'))
        data2D[:, f] *= f
        if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
            darray = gifti.GiftiDataArray(data=data2D[:, f])
        else:
            # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
            # initialized properly:
            darray = gifti.GiftiDataArray.from_array(data2D[:, f],
                                                     intent='t test')
        gii = gifti.GiftiImage(darrays=[darray])
        gifti.write(gii, fnames[f])

    assert_array_equal(load_surf_data(os.path.join(os.path.dirname(fnames[0]),
                                                   "glob*.gii")), data2D)

    # make one more gii file that has more than one dimension
    fnames.append(tempfile.mktemp(prefix='glob_3_', suffix='.gii'))
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray1 = gifti.GiftiDataArray(data=np.ones((20, )))
        darray2 = gifti.GiftiDataArray(data=np.ones((20, )))
        darray3 = gifti.GiftiDataArray(data=np.ones((20, )))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray1 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
        darray2 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
        darray3 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
    gii = gifti.GiftiImage(darrays=[darray1, darray2, darray3])
    gifti.write(gii, fnames[-1])

    data2D = np.concatenate((data2D, np.ones((20, 3))), axis=1)
    assert_array_equal(load_surf_data(os.path.join(os.path.dirname(fnames[0]),
                                                   "glob*.gii")), data2D)

    # make one more gii file that has a different shape in axis=0
    fnames.append(tempfile.mktemp(prefix='glob_4_', suffix='.gii'))
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray = gifti.GiftiDataArray(data=np.ones((15, 1)))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray = gifti.GiftiDataArray.from_array(np.ones((15, 1)),
                                                 intent='t test')
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, fnames[-1])

    assert_raises_regex(ValueError,
                        'files must contain data with the same shape',
                        load_surf_data,
                        os.path.join(os.path.dirname(fnames[0]), "*.gii"))
    for f in fnames:
        os.remove(f)
Example #10
def save_texture(path, data, intent='none', verbose=False):
    """
    volume saving utility for textures
    
    Parameters
    ----------
    path, string, output image path
    data, array of shape (nnode)
          data to be put in the volume
    intent: string, optional
            intent

    Fixme
    -----
    Missing checks
    Handle the case where data is multi-dimensional ? 
    """
    from nibabel.gifti import write, GiftiDataArray, GiftiImage
    if verbose:
        print('Warning: assuming a float32 gifti file')
    darray = GiftiDataArray().from_array(data.astype(np.float32), intent)
    img = GiftiImage(darrays=[darray])
    write(img, path)
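For instance, save_texture above could write one float value per mesh node like this (a sketch; the data is cast to float32 as the warning notes):

import numpy as np

data = np.random.rand(20)  # one value per mesh node
save_texture('/tmp/map.gii', data, verbose=True)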
Example #11
def test_load_surf_mesh_file_gii():
    # Test the loader `load_surf_mesh`

    # If nibabel is of older version we skip tests as nibabel does not
    # support intent argument and intent codes are not handled properly with
    # older versions

    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        raise SkipTest

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    filename_gii_mesh = tempfile.mktemp(suffix='.gii')

    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])

    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    filename_gii_mesh_no_point = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_POINTSET',
                        load_surf_mesh, filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    filename_gii_mesh_no_face = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_TRIANGLE',
                        load_surf_mesh, filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
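On this era of nilearn, the loader exercised above returns the coordinates and faces as two arrays; a minimal sketch with a hypothetical path:

from nilearn.surface import load_surf_mesh

coords, faces = load_surf_mesh('/tmp/mesh.gii')
print(coords.shape, faces.shape)  # (n_vertices, 3) and (n_faces, 3)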
Example #12
def run_glms(subject):
    # necessary paths
    analysis_dir = os.path.join(spm_dir, subject, 'analyses')
    subject_dir = os.path.join(work_dir, subject)
    if not os.path.exists(subject_dir):
        os.mkdir(subject_dir)
    fmri_dir = os.path.join(subject_dir, 'fmri')
    if not os.path.exists(fmri_dir):
        os.mkdir(fmri_dir)
    result_dir = os.path.join(fmri_dir, 'results')
    if not os.path.exists(result_dir):
        os.mkdir(result_dir)
    memory = Memory(cachedir=os.path.join(fmri_dir, 'cache_dir'), verbose=0)
    
    # audiosentence protocol
    # step 1: get the necessary files
    spm_fmri_dir = os.path.join(spm_dir, subject, 'fMRI/audiosentence')
    onset_dir = os.path.join(analysis_dir, 'audiosentence')
    onset_files = glob.glob(os.path.join(onset_dir, 'onsetfile*.mat'))
    motion_files = glob.glob(
        os.path.join(spm_fmri_dir, 'rp*.txt'))
    left_fmri_files = glob.glob(os.path.join(spm_fmri_dir, 'sraaudio*_lh.gii'))
    right_fmri_files = glob.glob(os.path.join(spm_fmri_dir, 'sraaudio*_rh.gii'))
    onset_files.sort()
    motion_files.sort()
    left_fmri_files.sort()
    right_fmri_files.sort()
    
    # get the ratings of the trials
    final_data = os.path.join(behavioral_dir, subject,
                              'finaldata_%s.mat' % subject)
    ratings = make_ratings(final_data)
    
    # scan times
    n_scans = 200
    lh_effects, lh_variances, rh_effects, rh_variances = {}, {}, {}, {}
    for i, (onset_file, motion_file, left_fmri_file, right_fmri_file) in\
            enumerate(zip(
            onset_files, motion_files, left_fmri_files, right_fmri_files)):
        # Create the design matrix
        dmtx = audiosentence_dmtx(final_data, motion_file, n_scans, tr, i)
        ax = dmtx.show()
        ax.set_position([.05, .25, .9, .65])
        ax.set_title('Design matrix')
        session_contrasts = audiosentence_contrasts(dmtx.names, final_data, i)
        fmri_glm = GeneralLinearModel(dmtx.matrix)

        # left hemisphere
        Y = np.array([darrays.data for darrays in read(left_fmri_file).darrays])
        # fit the GLM
        fmri_glm.fit(Y, model='ar1')
        # Estimate the contrasts
        print('Computing contrasts...')
        for index, contrast_id in enumerate(session_contrasts):
            print('  Contrast % i out of %i: %s' %
                  (index + 1, len(session_contrasts), contrast_id))
            # save the z_image
            contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
            if i == 0:
                lh_effects[contrast_id] = [contrast_.effect.ravel()]
                lh_variances[contrast_id] = [contrast_.variance.ravel()]
            else:
                lh_effects[contrast_id].append(contrast_.effect.ravel())
                lh_variances[contrast_id].append(contrast_.variance.ravel())
        
        # right hemisphere
        Y = np.array(
            [darrays.data for darrays in read(right_fmri_file).darrays])
        # fit the GLM
        fmri_glm.fit(Y, model='ar1')

        # Estimate the contrasts
        
        for index, contrast_id in enumerate(session_contrasts):
            # save the z_image
            contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
            if i == 0:
                rh_effects[contrast_id] = [contrast_.effect.ravel()]
                rh_variances[contrast_id] = [contrast_.variance.ravel()]
            else:
                rh_effects[contrast_id].append(contrast_.effect.ravel())
                rh_variances[contrast_id].append(contrast_.variance.ravel())
        
    
    for index, contrast_id in enumerate(session_contrasts):
        # left hemisphere
        _, _, z_map = fixed_effects(
            lh_effects[contrast_id], lh_variances[contrast_id])
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_lh.gii' % contrast_id)
        write(z_texture, z_map_path)
        # right hemisphere
        _, _, z_map = fixed_effects(
            rh_effects[contrast_id], rh_variances[contrast_id])
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_rh.gii' % contrast_id)
        write(z_texture, z_map_path)

    #########################################################################
    # localizer protocol
    # get the necessary files
    spm_fmri_dir = os.path.join(spm_dir, subject, 'fMRI/localizer')
    motion_file, = glob.glob(
        os.path.join(spm_dir, subject, 'fMRI/localizer/rp*.txt'))
    left_fmri_file = glob.glob(
        os.path.join(spm_fmri_dir, 'sralocalizer*_lh.gii'))[0]
    right_fmri_file = glob.glob(
        os.path.join(spm_fmri_dir, 'sralocalizer*_rh.gii'))[0]
    
    n_scans = 205

    # Create the design matrix
    dmtx = localizer_dmtx(motion_file, n_scans, tr)
    ax = dmtx.show()
    ax.set_position([.05, .25, .9, .65])
    ax.set_title('Design matrix')
    session_contrasts = localizer_contrasts(dmtx)
    fmri_glm = GeneralLinearModel(dmtx.matrix)
    
    # left hemisphere
    Y = np.array([darrays.data for darrays in read(left_fmri_file).darrays])
    # fit the GLM
    fmri_glm.fit(Y, model='ar1')
    # Estimate the contrasts
    print('Computing contrasts...')
    for index, contrast_id in enumerate(session_contrasts):
        print('  Contrast % i out of %i: %s' %
              (index + 1, len(session_contrasts), contrast_id))
        # save the z_image
        contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
        z_map = contrast_.z_score()
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_lh.gii' % contrast_id)
        write(z_texture, z_map_path)

    # right hemisphere
    Y = np.array([darrays.data for darrays in read(right_fmri_file).darrays])
    # fit the GLM
    fmri_glm.fit(Y, model='ar1')
    # Estimate the contrasts
    print('Computing contrasts...')
    for index, contrast_id in enumerate(session_contrasts):
        print('  Contrast % i out of %i: %s' %
              (index + 1, len(session_contrasts), contrast_id))
        # save the z_image
        contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
        z_map = contrast_.z_score()
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_rh.gii' % contrast_id)
        write(z_texture, z_map_path)
    
    #########################################################################
    # VisualCategs protocol
    # get the necessary files
    spm_fmri_dir = os.path.join(spm_dir, subject, 'fMRI/visualcategs')
    onset_dir = os.path.join(analysis_dir, 'visualcategs')
    onset_files = glob.glob(os.path.join(onset_dir, 'onsetfile*.mat'))
    motion_files = glob.glob(
        os.path.join(spm_dir, subject, 'fMRI/visualcategs/rp*.txt'))
    fmri_files = glob.glob(os.path.join(fmri_dir, 'crvisu*.nii.gz'))
    onset_files.sort()
    motion_files.sort()
    fmri_files.sort()

    left_fmri_files = glob.glob(
        os.path.join(spm_fmri_dir, 'sravisu*_lh.gii'))
    right_fmri_files = glob.glob(
        os.path.join(spm_fmri_dir, 'sravisu*_rh.gii'))
    n_scans = 185

    lh_effects, lh_variances, rh_effects, rh_variances = {}, {}, {}, {}
    
    for i, (onset_file, motion_file, left_fmri_file, right_fmri_file) in\
            enumerate(zip(
            onset_files, motion_files, left_fmri_files, right_fmri_files)):
        # Create the design matrix
        dmtx = visualcategs_dmtx(onset_file, motion_file, n_scans, tr)
        ax = dmtx.show()
        ax.set_position([.05, .25, .9, .65])
        ax.set_title('Design matrix')
        session_contrasts = visualcategs_contrasts(dmtx.names)
        fmri_glm = GeneralLinearModel(dmtx.matrix)
    
        # left hemisphere
        Y = np.array([darrays.data for darrays in read(left_fmri_file).darrays])
        # fit the GLM
        fmri_glm.fit(Y, model='ar1')
        # Estimate the contrasts
        print('Computing contrasts...')
        for index, contrast_id in enumerate(session_contrasts):
            print('  Contrast % i out of %i: %s' %
                  (index + 1, len(session_contrasts), contrast_id))
            # save the z_image
            contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
            if i == 0:
                lh_effects[contrast_id] = [contrast_.effect.ravel()]
                lh_variances[contrast_id] = [contrast_.variance.ravel()]
            else:
                lh_effects[contrast_id].append(contrast_.effect.ravel())
                lh_variances[contrast_id].append(contrast_.variance.ravel())

        # right hemisphere
        Y = np.array([
                darrays.data for darrays in read(right_fmri_file).darrays])
        # fit the GLM
        fmri_glm.fit(Y, model='ar1')
        # Estimate the contrasts
        print('Computing contrasts...')
        for index, contrast_id in enumerate(session_contrasts):
            print('  Contrast % i out of %i: %s' %
                  (index + 1, len(session_contrasts), contrast_id))
            # save the z_image
            contrast_ = fmri_glm.contrast(session_contrasts[contrast_id])
            if i == 0:
                rh_effects[contrast_id] = [contrast_.effect.ravel()]
                rh_variances[contrast_id] = [contrast_.variance.ravel()]
            else:
                rh_effects[contrast_id].append(contrast_.effect.ravel())
                rh_variances[contrast_id].append(contrast_.variance.ravel())

    for index, contrast_id in enumerate(session_contrasts):
        # left hemisphere
        _, _, z_map = fixed_effects(
            lh_effects[contrast_id], lh_variances[contrast_id])
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_lh.gii' % contrast_id)
        write(z_texture, z_map_path)
        # right hemisphere
        _, _, z_map = fixed_effects(
            rh_effects[contrast_id], rh_variances[contrast_id])
        z_texture = GiftiImage(
            darrays=[GiftiDataArray().from_array(z_map, intent='t test')])
        z_map_path = os.path.join(result_dir, '%s_z_map_rh.gii' % contrast_id)
        write(z_texture, z_map_path)
Example #13
        func_mesh = mlab.pipeline.triangular_mesh_source(
            x, y, z, triangles, scalars=tex)
        thresh = mlab.pipeline.threshold(func_mesh, low=THRESHOLD)
        mlab.pipeline.surface(thresh, colormap="hot", vmin=THRESHOLD, vmax=7)

"""
# plot individual images
for contrast in contrasts:
    display(os.path.join(fun_work_dir, subject, 'fmri/results'),
            fs_dir, contrast)
"""

from scipy.stats import ttest_1samp
from nibabel.gifti import read, write, GiftiDataArray, GiftiImage
write_dir = '/tmp'

for contrast in contrasts:
    for side in ['lh', 'rh']:
        stat_img = [os.path.join(fun_work_dir, subject, 'fmri/results',
                                 '%s_z_map_%s.gii' % (contrast, side))
                    for subject in subjects]
        X = np.array([np.asarray(read(simg).darrays[0].data)
                      for simg in stat_img])
        t_vals, _ = ttest_1samp(X, 0)
        stat_texture = GiftiImage(darrays=[
                    GiftiDataArray().from_array(t_vals, intent='z score')])
        stat_path = os.path.join(write_dir, '%s_z_map_%s.gii' %
                                 (contrast, side))
        write(stat_texture, stat_path)

    display(write_dir, fs_dir, contrast)
Example #14
def smooth_texture(mesh, input_texture, output_texture=None, sigma=1,
                   lsigma=1., mask=None):
    """ Smooth a texture along some mesh

    Parameters
    ----------
    mesh: string,
          path to gii mesh
    input_texture: string,
                   texture path
    output_texture: string,
                    smoothed texture path
    sigma: float,
           desired amount of smoothing
    lsigma: float,
            approximate smoothing in one iteration
    mask: string,
          path of a mask texture
    """
    import nipy.algorithms.graph.field as ff

    G = mesh_to_graph(mesh)
    if mask is not None:
        mask = read(mask).darrays[0].data > 0
        G = G.subgraph(mask)
    add_edges = np.vstack((np.arange(G.V), np.arange(G.V))).T
    edges = np.vstack((G.edges, add_edges))
    weights = np.concatenate((G.weights, np.zeros(G.V)))
    weights = np.maximum(np.exp(- weights ** 2 / (2 * lsigma ** 2)), 1.e-15)

    f = ff.Field(G.V, edges, weights)
    # need to re-order the edges
    order = np.argsort(f.edges.T[0] * f.V + f.edges.T[1])
    f.edges, f.weights = f.edges[order], f.weights[order]
    f.normalize(0)
    niter = (sigma * 1. / lsigma) ** 2

    if input_texture[-4:] == '.tex':
        import tio as tio
        data = tio.Texture("").read(input_texture).data
    else:
        data = read(input_texture).darrays[0].data
    if mask is not None:
        data = data[mask]
    dtype = data.dtype
    data[np.isnan(data)] = 0
    f.set_field(data.T)
    f.diffusion(niter)
    data = f.get_field().astype(dtype)

    if output_texture is not None:
        if output_texture[-4:] == '.tex':
            import tio as tio
            tio.Texture("", data=data.T).write(output_texture)
            print('tex')
        else:
            intent = 0
            wdata = data
            if mask is not None:
                wdata = mask.astype(np.float32)
                wdata[mask > 0] = data
            darray = GiftiDataArray().from_array(wdata.astype(np.float32),
                                                 intent)
            img = GiftiImage(darrays=[darray])
            write(img, output_texture)
    return data
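A hedged call sketch for smooth_texture above (paths are hypothetical; nipy and the module's mesh_to_graph helper are assumed available):

# smooth a per-vertex map along the mesh with sigma=3 (in lsigma=1 steps)
smoothed = smooth_texture('/tmp/lh_white.gii', '/tmp/map.gii',
                          output_texture='/tmp/map_smooth.gii',
                          sigma=3., lsigma=1.)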
Example #15
def save_data(obj):

    objrep = str(type(obj))

    if hasattr(obj, 'data'):

        # it appears that there is no remove function for zip archives implemented to date
        # http://bugs.python.org/issue6818

        # the file was loaded, thus it exists a .tmpsrc pointing to
        # its absolute path. Use this path to overwrite the file by the
        # current .data data
        if hasattr(obj, 'tmpsrc'):
            tmpfname = obj.tmpsrc
        else:
            # if it has no .tmpsrc, i.e. it is not loaded from a file path
            # but it has a .data set
            raise Exception('Element %s cannot be saved. (It was never loaded)' % str(obj))

        dname = op.dirname(tmpfname)
        if not op.exists(dname):
            os.makedirs(dname)

        if 'CVolume' in objrep:
            print "Saving CVolume ..."
            ni.save(obj.data, tmpfname)
            print "Done."
        elif 'CNetwork' in objrep:
            print "Saving CNetwork"
            if obj.fileformat == "GraphML":
                # write graph to temporary file
                nx.write_graphml(obj.data, tmpfname)
            elif obj.fileformat == "GEXF":
                nx.write_gexf(obj.data, tmpfname)
            elif obj.fileformat == "NXGPickle":
                nx.write_gpickle(obj.data, tmpfname)
            else:
                raise NotSupportedFormat("Other", str(obj))
            print "Done."

        elif 'CSurface' in objrep:
            if obj.fileformat == "Gifti":
                import nibabel.gifti as nig
                nig.write(obj.data, tmpfname)
            else:
                raise NotSupportedFormat("Other", str(obj))

        elif 'CTrack' in objrep:
            if obj.fileformat == "TrackVis":
                ni.trackvis.write(tmpfname, obj.data[0], obj.data[1])
            else:
                raise NotSupportedFormat("Other", str(obj))

        elif 'CTimeserie' in objrep:
            if obj.fileformat == "HDF5":
                # flush the data of the buffers
                obj.data.flush()
                # close the file
                obj.data.close()
            elif obj.fileformat == "NumPy":
                np.save(tmpfname, obj.data)
            else:
                raise NotSupportedFormat("Other", str(obj))

        elif 'CData' in objrep:

            if obj.fileformat == "NumPy":
                np.save(tmpfname, obj.data)
            elif obj.fileformat == "HDF5":
                # flush the data of the buffers
                obj.data.flush()
                # close the file
                obj.data.close()
            elif obj.fileformat == "XML":
                f = open(tmpfname, 'w')
                f.write(obj.data)
                f.close()
            elif obj.fileformat == "JSON":
                f = open(tmpfname, 'w')
                json.dump(obj.data, f)
                f.close()
            elif obj.fileformat == "Pickle":
                f = open(tmpfname, 'w')
                pickle.dump(obj.data, f)
                f.close()
            elif obj.fileformat == "CSV" or obj.fileformat == "TXT":
                # write as text
                f = open(tmpfname, 'w')
                f.write(obj.data)
                f.close()
            else:
                raise NotSupportedFormat("Other", str(obj))

        elif 'CScript' in objrep:
                f = open(tmpfname, 'w')
                f.write(obj.data)
                f.close()

        return tmpfname

    else:
        # assumes the .src paths are given relative to the meta.cml
        # valid for iszip = True and iszip = False
        # either path to the .cff or to the meta.cml
        # return op.join(op.dirname(obj.parent_cfile.fname), obj.src)
        print "Connectome Object is not loaded. Nothing to save."
        return ''
Example #16
labelsf = ward.labels_

for i in range(len(np.unique(labelsf))):
    print('label %d: %d' % (i, len(labelsf[labelsf == i])))
# write the final gifti parcellation
print('write parcellation.gii')

ii = 0
for i in surfmask_inds:
    seedroi_gii.darrays[darrays_int].data[i] = labelsf[ii]+2
    ii += 1

# if using the label2surf seed mask, remove the vertex and triangle information and save the gifti texture in a new gifti file
if space == 'surface':
    # save the mesh
    output_mesh_path = op.join(surface_dir,'{}_norma_{}_{}_parcellation_cl{}_mesh.gii'.format(output_name, hemi, altas, nb_clusters))
    ng.write(seedroi_gii, output_mesh_path)
    # remove the vertex and triangles
    seedroi_gii.remove_gifti_data_array(0)
    seedroi_gii.remove_gifti_data_array(0)

ng.write(seedroi_gii, output_gii_parcellation_path)
print('save parcellation:\n' + output_gii_parcellation_path)

# =============== visualization ========================================================================================
inflated_surface_path = op.join(fs_subject_dir, subject, 'surf', '{}.inflated'.format(hemi.lower()))

# visualization of the parcellation in freeview, overlaid on the inflated surface
cmd_freeview = '%s/freeview -f %s:overlay=%s &' % (fs_exec_dir, inflated_surface_path, output_gii_parcellation_path)
print(cmd_freeview)
Example #17
def fixed_effects_analysis(subject_dic,
                           surface=False,
                           mask_img=None,
                           lowres=False):
    """ Combine the AP and PA images """
    from nibabel import load, save
    from nilearn.plotting import plot_stat_map

    session_ids = subject_dic['session_id']
    task_ids = _session_id_to_task_id(session_ids)
    paradigms = np.unique(task_ids)
    if mask_img is None:
        mask_img = os.path.join(subject_dic['output_dir'], "mask.nii.gz")

    # Guessing paradigm from file name
    for paradigm in paradigms:
        # select the sessions relevant for the paradigm
        session_paradigm = [
            session_id for (session_id, task_id) in zip(session_ids, task_ids)
            if task_id == paradigm
        ]
        # define the relevant contrasts
        contrasts = make_contrasts(paradigm).keys()
        # create write_dir
        if surface:
            if lowres:
                write_dir = os.path.join(subject_dic['output_dir'],
                                         'res_fsaverage5_%s_ffx' % paradigm)
            else:
                write_dir = os.path.join(subject_dic['output_dir'],
                                         'res_surf_%s_ffx' % paradigm)
            dirs = [
                os.path.join(write_dir, stat)
                for stat in ['effect_surf', 'variance_surf', 'stat_surf']
            ]
        else:
            write_dir = os.path.join(subject_dic['output_dir'],
                                     'res_stats_%s_ffx' % paradigm)
            dirs = [
                os.path.join(write_dir, stat) for stat in
                ['effect_size_maps', 'effect_variance_maps', 'stat_maps']
            ]
        for dir_ in dirs:
            if not os.path.exists(dir_):
                os.makedirs(dir_)
        print(write_dir)

        # iterate across contrasts
        for contrast in contrasts:
            print('fixed effects for contrast %s. ' % contrast)
            if surface:
                from nibabel.gifti import write
                for side in ['lh', 'rh']:
                    effect_size_maps, effect_variance_maps, data_available =\
                        _load_summary_stats(
                            subject_dic['output_dir'],
                            np.unique(session_paradigm),
                            contrast,
                            data_available=True, side=side, lowres=lowres)
                    if not data_available:
                        raise ValueError('Missing texture stats files for '
                                         'fixed effects computations')
                    ffx_effects, ffx_variance, ffx_stat = fixed_effects_surf(
                        effect_size_maps, effect_variance_maps)
                    write(
                        ffx_effects,
                        os.path.join(
                            write_dir,
                            'effect_surf/%s_%s.gii' % (contrast, side)))
                    write(
                        ffx_variance,
                        os.path.join(
                            write_dir,
                            'variance_surf/%s_%s.gii' % (contrast, side)))
                    write(
                        ffx_stat,
                        os.path.join(write_dir,
                                     'stat_surf/%s_%s.gii' % (contrast, side)))
            else:
                effect_size_maps, effect_variance_maps, data_available =\
                    _load_summary_stats(
                        subject_dic['output_dir'], session_paradigm, contrast,
                        data_available=True)
                shape = load(effect_size_maps[0]).shape
                if len(shape) > 3:
                    if shape[3] > 1:  # F contrast, skipping
                        continue
                ffx_effects, ffx_variance, ffx_stat = fixed_effects_img(
                    effect_size_maps, effect_variance_maps, mask_img)
                save(
                    ffx_effects,
                    os.path.join(write_dir,
                                 'effect_size_maps/%s.nii.gz' % contrast))
                save(
                    ffx_variance,
                    os.path.join(write_dir,
                                 'effect_variance_maps/%s.nii.gz' % contrast))
                save(ffx_stat,
                     os.path.join(write_dir, 'stat_maps/%s.nii.gz' % contrast))
                plot_stat_map(ffx_stat,
                              bg_img=subject_dic['anat'],
                              display_mode='z',
                              dim=0,
                              cut_coords=7,
                              title=contrast,
                              threshold=3.0,
                              output_file=os.path.join(
                                  write_dir, 'stat_maps/%s.png' % contrast))
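A hedged call sketch; the subject_dic layout below is inferred from the lookups inside the function and the values are hypothetical:

subject_dic = {
    'session_id': ['task-archi_ses-01', 'task-archi_ses-02'],
    'output_dir': '/tmp/sub-01',
    'anat': '/tmp/sub-01/anat.nii.gz',
}
fixed_effects_analysis(subject_dic, surface=False)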
Example #18
        print('shape of gifti data: {}'.format(g_data.shape))

        # extract the profile of the seed region and compute the profile projected on the surface
        profile_sum = g_data.sum(axis=1)
        idx = np.where(profile_sum != 0)[0]

        # compare the seed.gii with the projection:
        nb_sumprofil = len(idx)
        nb_vertex_seedroi = len(np.flatnonzero(seedroi_gii_data))
        print('extract seed region: number of vertices {}'.format(nb_sumprofil))
        print('number of vertices in seed gifti file: {}'.format(nb_vertex_seedroi))

        print('correct seedroi_gii surface mask')
        seedroi_gii.darrays[0].data[:] = 0
        seedroi_gii.darrays[0].data[idx] = 1
        ng.write(seedroi_gii, seedroi_gii_path)

        connmat_proj = g_data[idx, :]

        print("shape of the surfacic connmat: {}".format(connmat_proj.shape))

        # save the projected connmat
        jl.dump(connmat_proj, connmat_proj_path, compress=3)
        print("surfacic_connectivity_profile saved: \n %s " % connmat_proj_path)

        # check the seedroi gifti file again to make sure the number of vertices corresponds to the surfacic matrix
        print('new seed gifti file shape: {}'.format(ng.read(seedroi_gii_path).darrays[0].data.shape))