Example #1
def test_load_surf_data_file_glob():

    data2D = np.ones((20, 3))
    fnames = []
    for f in range(3):
        fnames.append(tempfile.mktemp(prefix='glob_%s_' % f, suffix='.gii'))
        data2D[:, f] *= f
        if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
            darray = gifti.GiftiDataArray(data=data2D[:, f])
        else:
            # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
            # initialized properly:
            darray = gifti.GiftiDataArray.from_array(data2D[:, f],
                                                     intent='t test')
        gii = gifti.GiftiImage(darrays=[darray])
        gifti.write(gii, fnames[f])

    assert_array_equal(load_surf_data(os.path.join(os.path.dirname(fnames[0]),
                                                   "glob*.gii")), data2D)

    # make one more gii file that has more than one dimension
    fnames.append(tempfile.mktemp(prefix='glob_3_', suffix='.gii'))
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray1 = gifti.GiftiDataArray(data=np.ones((20, )))
        darray2 = gifti.GiftiDataArray(data=np.ones((20, )))
        darray3 = gifti.GiftiDataArray(data=np.ones((20, )))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray1 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
        darray2 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
        darray3 = gifti.GiftiDataArray.from_array(np.ones((20, )),
                                                  intent='t test')
    gii = gifti.GiftiImage(darrays=[darray1, darray2, darray3])
    gifti.write(gii, fnames[-1])

    data2D = np.concatenate((data2D, np.ones((20, 3))), axis=1)
    assert_array_equal(load_surf_data(os.path.join(os.path.dirname(fnames[0]),
                                                   "glob*.gii")), data2D)

    # make one more gii file that has a different shape in axis=0
    fnames.append(tempfile.mktemp(prefix='glob_4_', suffix='.gii'))
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray = gifti.GiftiDataArray(data=np.ones((15, 1)))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray = gifti.GiftiDataArray.from_array(np.ones((15, 1)),
                                                 intent='t test')
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, fnames[-1])

    assert_raises_regex(ValueError,
                        'files must contain data with the same shape',
                        load_surf_data,
                        os.path.join(os.path.dirname(fnames[0]), "*.gii"))
    for f in fnames:
        os.remove(f)
Example #2
def mesh_from_arrays(coord, triangles, path=None):
    """ Create a mesh object from two arrays

    fixme:  intent should be set !
    """
    carray = gifti.GiftiDataArray().from_array(coord.astype(np.float32),
                                               "NIFTI_INTENT_POINTSET",
                                               encoding='B64BIN')
    #endian="LittleEndian")
    tarray = gifti.GiftiDataArray().from_array(triangles.astype(np.int32),
                                               "NIFTI_INTENT_TRIANGLE",
                                               encoding='B64BIN')
    #endian="LittleEndian")
    img = gifti.GiftiImage(darrays=[carray, tarray])
    if path is not None:
        try:
            from soma import aims
            mesh = aims.AimsTimeSurface(3)
            mesh.vertex().assign([aims.Point3df(x) for x in coord])
            mesh.polygon().assign(
                [aims.AimsVector_U32_3(x) for x in triangles])
            aims.write(mesh, path)
        except:
            print("soma writing failed")
            gifti.write(img, path)
    return img
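A minimal usage sketch for mesh_from_arrays above; the tetrahedron is made up for illustration, and the optional path argument is omitted so nothing is written to disk:

import numpy as np

# Toy tetrahedron: 4 vertices and 4 triangular faces (illustrative values).
coords = np.array([[0., 0., 0.],
                   [1., 0., 0.],
                   [0., 1., 0.],
                   [0., 0., 1.]])
triangles = np.array([[0, 1, 2],
                      [0, 1, 3],
                      [0, 2, 3],
                      [1, 2, 3]])

# Build the GiftiImage in memory; pass path='...' to also write a mesh file.
img = mesh_from_arrays(coords, triangles)
print(len(img.darrays))  # 2: one pointset array, one triangle array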
Example #3
    def write(self, gifti_filename, scalar_arr=None, **kwargs):
        """
        Writes a surface to a surface gifti file.

        :param gifti_filename: output filename
        :param scalar_arr: optionally include a scalar array with same length as number of vertices (as expected by FSL's probtrackX)
        :param kwargs: any keywords are added to the meta information in the GIFTI file
        """
        from . import cortical_mesh
        use_kwargs = {
            'Date': str(datetime.datetime.now()),
            'encoding': 'XML',
            'GeometricType': 'Anatomical'
        }
        use_kwargs.update(cortical_mesh.BrainStructure('Other').gifti)
        use_kwargs.update(kwargs)
        meta = gifti.GiftiMetaData.from_dict(use_kwargs)
        img = gifti.GiftiImage(meta=meta)
        for arr, intent, dtype in zip([self.vertices, self.faces],
                                      ['pointset', 'triangle'], ['f4', 'i4']):
            img.add_gifti_data_array(
                gifti.GiftiDataArray(arr.T.astype(dtype),
                                     intent,
                                     meta=meta.metadata))
        if scalar_arr is not None:
            img.add_gifti_data_array(
                gifti.GiftiDataArray(scalar_arr.astype('f4'),
                                     intent='shape',
                                     meta=meta.metadata))
        for da in img.darrays:
            da.encoding = 2  # Base64Binary
        nib.save(img, gifti_filename)
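A hypothetical call of the method above, assuming surf is an instance of the (not shown) surface class that defines it, with its vertices and faces already populated; the names and filenames below are placeholders, and any extra keyword simply ends up in the GIFTI metadata:

# 'surf' and the filenames are placeholders for illustration only.
surf.write('lh.white.surf.gii', AnatomicalStructurePrimary='CortexLeft')

# Optionally attach one scalar value per vertex (e.g. for FSL's probtrackX):
# surf.write('lh.white_scalars.surf.gii', scalar_arr=per_vertex_values)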
Example #4
    def _run_interface(self, runtime):
        import os.path as op
        import nibabel.gifti as ng
        import numpy as np
        import skimage.measure as sm
        import nilearn.image as nimg

        import slam.io as sio
        import slam.differential_geometry as sdg

        from nipype.utils.filemanip import split_filename

        # Generate output mesh filename from the input image name
        _, fname, _ = split_filename(self.inputs.image_file)
        gii_file = op.abspath(op.join(runtime.cwd, fname + ".gii"))

        # Load the largest connected component of the input image
        img = nimg.largest_connected_component_img(self.inputs.image_file)

        # TODO: check if the input image is correct (binary)

        # Run the marching cube algorithm
        verts, faces, normals, values = sm.marching_cubes_lewiner(
            img.get_data(), self.inputs.level)

        # Convert vertices coordinates to image space
        # TODO: check that it is correct by plotting the mesh on the image
        x, y, z = nimg.coord_transform(verts[:, 0], verts[:, 1], verts[:, 2],
                                       img.affine)
        mm_verts = np.array([x, y, z]).T

        # Save the mesh as Gifti
        # FIXME: FreeView cannot open the mesh (but Anatomist does)
        gii = ng.GiftiImage(darrays=[
            ng.GiftiDataArray(mm_verts, intent='NIFTI_INTENT_POINTSET'),
            ng.GiftiDataArray(faces, intent='NIFTI_INTENT_TRIANGLE')
        ])
        gii.meta = ng.GiftiMetaData().from_dict({
            "volume_file":
            self.inputs.image_file,
            "marching_cube_level":
            str(self.inputs.level),
            "smoothing_iterations":
            str(self.inputs.smoothing_iter),
            "smoothing_dt":
            str(self.inputs.smoothing_dt)
        })
        ng.write(gii, gii_file)

        # Optional: Smooth the marching cube output with SLAM
        if self.inputs.smoothing_iter > 0:
            mesh = sdg.laplacian_mesh_smoothing(
                sio.load_mesh(gii_file),
                nb_iter=self.inputs.smoothing_iter,
                dt=self.inputs.smoothing_dt)
            sio.write_mesh(mesh, gii_file)

        return runtime
Example #5
def test_domain_from_mesh():
    """Test domain_from_mesh method
    """
    coords = np.array([[0., 0., 0.], [0., 0., 1.], [0., 1., 0.], [1., 0., 0.]])
    triangles = np.asarray([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
    darrays = [nbg.GiftiDataArray(coords)] + [nbg.GiftiDataArray(triangles)]
    toy_image = nbg.GiftiImage(darrays=darrays)
    domain = domain_from_mesh(toy_image)
    # if we get there, we could build the domain, and that's what we wanted.
    assert_equal(domain.get_coord(), coords)
Example #6
def test_load_surf_data_file_glob(tmp_path):

    data2D = np.ones((20, 3))
    fnames = []
    for f in range(3):
        fd, filename = tempfile.mkstemp(prefix='glob_%s_' % f,
                                        suffix='.gii',
                                        dir=str(tmp_path))
        os.close(fd)
        fnames.append(filename)
        data2D[:, f] *= f
        darray = gifti.GiftiDataArray(data=data2D[:, f])
        gii = gifti.GiftiImage(darrays=[darray])
        gifti.write(gii, fnames[f])

    assert_array_equal(load_surf_data(
        os.path.join(os.path.dirname(fnames[0]), "glob*.gii")),
        data2D
    )

    # make one more gii file that has more than one dimension
    fd, filename = tempfile.mkstemp(prefix='glob_3_',
                                    suffix='.gii',
                                    dir=str(tmp_path))
    os.close(fd)
    fnames.append(filename)
    darray1 = gifti.GiftiDataArray(data=np.ones((20, )))
    darray2 = gifti.GiftiDataArray(data=np.ones((20, )))
    darray3 = gifti.GiftiDataArray(data=np.ones((20, )))
    gii = gifti.GiftiImage(darrays=[darray1, darray2, darray3])
    gifti.write(gii, fnames[-1])

    data2D = np.concatenate((data2D, np.ones((20, 3))), axis=1)
    assert_array_equal(load_surf_data(os.path.join(os.path.dirname(fnames[0]),
                                                   "glob*.gii")), data2D)

    # make one more gii file that has a different shape in axis=0
    fd, filename = tempfile.mkstemp(prefix='glob_4_',
                                    suffix='.gii',
                                    dir=str(tmp_path))
    os.close(fd)
    fnames.append(filename)
    darray = gifti.GiftiDataArray(data=np.ones((15, 1)))
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, fnames[-1])

    with pytest.raises(ValueError,
                       match='files must contain data with the same shape'
                       ):
        load_surf_data(os.path.join(os.path.dirname(fnames[0]), "*.gii"))
    for f in fnames:
        os.remove(f)
Example #7
def test_gifti_img_to_mesh():
    mesh = generate_surf()

    coord_array = gifti.GiftiDataArray(data=mesh[0])
    coord_array.intent = nb.nifti1.intent_codes['NIFTI_INTENT_POINTSET']

    face_array = gifti.GiftiDataArray(data=mesh[1])
    face_array.intent = nb.nifti1.intent_codes['NIFTI_INTENT_TRIANGLE']

    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    coords, faces = _gifti_img_to_mesh(gii)
    assert_array_equal(coords, mesh[0])
    assert_array_equal(faces, mesh[1])
Example #8
def main(derivatives, subject, session):

    for hemi in ['lh', 'rh']:
        template = op.join(
            derivatives, 'sampled_giis', f'sub-{subject}', f'ses-{session}',
            'func',
            f'sub-{subject}_ses-{session}_left_over_right_desc-zmap-depth-*_hemi-{hemi}.gii'
        )
        print(template)
        zmaps = glob.glob(template)

        print(zmaps)

        mean_zmap = np.mean([nb.load(fn).darrays[0].data for fn in zmaps], 0)
        zmap_im = nb.load(zmaps[0])
        mean_zmap_im = gifti.GiftiImage(header=zmap_im.header,
                                        extra=zmap_im.extra)
        mean_zmap_im.add_gifti_data_array(gifti.GiftiDataArray(mean_zmap))
        mean_zmap_im.to_filename(
            op.join(
                derivatives, 'sampled_giis', f'sub-{subject}',
                f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_left_over_right_desc-zmap-depth-all_hemi-{hemi}.gii'
            ))

        mean_zmap_abs = np.abs(mean_zmap)
        mean_zmap_abs_im = gifti.GiftiImage(header=zmap_im.header,
                                            extra=zmap_im.extra)
        mean_zmap_abs_im.add_gifti_data_array(
            gifti.GiftiDataArray(mean_zmap_abs))
        mean_zmap_abs_im.to_filename(
            op.join(
                derivatives, 'sampled_giis', f'sub-{subject}',
                f'ses-{session}', 'func',
                f'sub-{subject}_ses-{session}_left_over_right_desc-abszmap-depth-all_hemi-{hemi}.gii'
            ))

        os.environ['SUBJECTS_DIR'] = op.join(derivatives, 'freesurfer')
        smoother = freesurfer.SurfaceSmooth()
        smoother.inputs.in_file = mean_zmap_abs_im.get_filename()
        smoother.inputs.fwhm = 2.0
        smoother.inputs.subject_id = f'sub-{subject}'
        smoother.inputs.hemi = hemi
        smoother.inputs.out_file = op.join(
            derivatives, 'sampled_giis', f'sub-{subject}', f'ses-{session}',
            'func',
            f'sub-{subject}_ses-{session}_left_over_right_desc-abszmap-depth-all_hemi-{hemi}_smoothed.gii'
        )

        r = smoother.run()
Example #9
def test_load_surf_data_file_nii_gii():
    # test loading of fake data from gifti file
    filename_gii = tempfile.mktemp(suffix='.gii')
    if LooseVersion(nb.__version__) > LooseVersion('2.0.2'):
        darray = gifti.GiftiDataArray(data=np.zeros((20, )))
    else:
        # Avoid a bug in nibabel 1.2.0 where GiftiDataArray were not
        # initialized properly:
        darray = gifti.GiftiDataArray.from_array(np.zeros((20, )),
                                                 intent='t test')
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, filename_gii)
    assert_array_equal(load_surf_data(filename_gii), np.zeros((20, )))
    os.remove(filename_gii)

    # test loading of data from empty gifti file
    filename_gii_empty = tempfile.mktemp(suffix='.gii')
    gii_empty = gifti.GiftiImage()
    gifti.write(gii_empty, filename_gii_empty)
    assert_raises_regex(ValueError, 'must contain at least one data array',
                        load_surf_data, filename_gii_empty)
    os.remove(filename_gii_empty)

    # test loading of fake data from nifti file
    filename_nii = tempfile.mktemp(suffix='.nii')
    filename_niigz = tempfile.mktemp(suffix='.nii.gz')
    nii = nb.Nifti1Image(np.zeros((20, )), affine=None)
    nb.save(nii, filename_nii)
    nb.save(nii, filename_niigz)
    assert_array_equal(load_surf_data(filename_nii), np.zeros((20, )))
    assert_array_equal(load_surf_data(filename_niigz), np.zeros((20, )))
    os.remove(filename_nii)
    os.remove(filename_niigz)
Example #10
def mri_surf2surf(data, source_subj, target_subj, hemi, subjects_dir=None):
    """Uses freesurfer mri_surf2surf to transfer vertex data between
        two freesurfer subjects
    
    Parameters
    ==========
    data: ndarray, shape=(n_imgs, n_verts)
        data arrays representing vertex data
    
    source_subj: str
        freesurfer subject name of source subject
    
    target_subj: str
        freesurfer subject name of target subject
    
    hemi: str in ("lh", "rh")
        string indicating hemisphere.

    subjects_dir: str, optional
        value to use for the SUBJECTS_DIR environment variable; if None, the
        caller's environment is used unchanged.

    Notes
    =====
    Requires mri_surf2surf to be on the path, or an active freesurfer
    environment.
    """
    data_arrays = [gifti.GiftiDataArray(d) for d in data]
    gifti_image = gifti.GiftiImage(darrays=data_arrays)

    tf_in = NamedTemporaryFile(suffix=".gii")
    nibabel.save(gifti_image, tf_in.name)

    tf_out = NamedTemporaryFile(suffix='.gii')
    cmd = _mri_surf2surf_command(source_subj, target_subj, tf_in.name,
                                 tf_out.name, hemi)
    if subjects_dir is not None:
        env = os.environ.copy()
        env['SUBJECTS_DIR'] = subjects_dir
    else:
        env = None

    print('Calling:')
    print(' '.join(cmd))
    p = sp.Popen(cmd, env=env)
    exit_code = p.wait()
    if exit_code != 0:
        if exit_code == 255:
            raise Exception(
                ("Missing file (see above). "
                 "If lh.sphere.reg is missing,\n"
                 "you likely need to run the 3rd "
                 "stage of freesurfer autorecon\n"
                 "(sphere registration) for this subject:\n"
                 ">>> cortex.freesurfer.autorecon('{fs_subject}', type='3')"
                 ).format(fs_subject=source_subj))
        #from subprocess import CalledProcessError # handle with this, maybe?
        raise Exception(("Exit code {exit_code} means that "
                         "mri_surf2surf failed").format(exit_code=exit_code))

    tf_in.close()
    output_img = nibabel.load(tf_out.name)
    output_data = np.array([da.data for da in output_img.darrays])
    tf_out.close()
    return output_data
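A hedged usage sketch for mri_surf2surf; it assumes FreeSurfer is installed with recon-all outputs for both subjects, and the subject name and SUBJECTS_DIR below are placeholders:

import numpy as np

# Two fake vertex maps on the fsaverage left hemisphere (163842 vertices).
data = np.random.randn(2, 163842)

# Placeholder subject name and directory; requires mri_surf2surf on the path.
resampled = mri_surf2surf(data, 'fsaverage', 'my_subject', 'lh',
                          subjects_dir='/path/to/subjects_dir')
print(resampled.shape)  # (2, n_verts of my_subject's lh surface)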
Example #11
def test_load_surf_mesh_file_gii(tmp_path):
    # Test the loader `load_surf_mesh`

    # If nibabel is of older version we skip tests as nibabel does not
    # support intent argument and intent codes are not handled properly with
    # older versions

    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        raise pytest.skip('Nibabel version too old to handle intent codes')

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    fd_mesh, filename_gii_mesh = tempfile.mkstemp(suffix='.gii',
                                                  dir=str(tmp_path))
    os.close(fd_mesh)
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])

    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    fd_no, filename_gii_mesh_no_point = tempfile.mkstemp(suffix='.gii',
                                                         dir=str(tmp_path))
    os.close(fd_no)
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    with pytest.raises(ValueError, match='NIFTI_INTENT_POINTSET'):
        load_surf_mesh(filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    fd_face, filename_gii_mesh_no_face = tempfile.mkstemp(suffix='.gii',
                                                          dir=str(tmp_path))
    os.close(fd_face)
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    with pytest.raises(ValueError, match='NIFTI_INTENT_TRIANGLE'):
        load_surf_mesh(filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
Example #12
def save_texture(filename, data):
    from nibabel import gifti
    import codecs
    darray = gifti.GiftiDataArray(data)
    gii = gifti.GiftiImage(darrays=[darray])
    f = codecs.open(filename, 'wb')
    f.write(gii.to_xml(enc='utf-8'))
    f.close()
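A short usage sketch for save_texture; the array below is a made-up per-vertex texture and the filename is a placeholder:

import numpy as np

# One scalar value per mesh vertex (illustrative vertex count).
texture = np.random.rand(10242).astype(np.float32)
save_texture('example_texture.gii', texture)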
Example #13
def bad_gifti():

    import nibabel.gifti as ngif

    g = ngif.GiftiImage()
    coords = np.random.uniform(size=(20, 3))
    trigs = np.random.randint(low=0, high=100, size=(5, 3))

    struct = ngif.GiftiNVPairs(name="AnatomicalStructurePrimary",
                               value="CortexLeft")
    meta = ngif.GiftiMetaData(struct)
    darray_coords = ngif.GiftiDataArray(data=coords,
                                        intent='NIFTI_INTENT_POINTSET',
                                        meta=meta)
    darray_trigs = ngif.GiftiDataArray(data=trigs, intent=1009)
    g.add_gifti_data_array(darray_coords)
    g.add_gifti_data_array(darray_trigs)
    return g
Example #14
def test_load_surf_mesh_file_gii():
    # Test the loader `load_surf_mesh`

    # If nibabel is of older version we skip tests as nibabel does not
    # support intent argument and intent codes are not handled properly with
    # older versions

    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        raise SkipTest

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    filename_gii_mesh = tempfile.mktemp(suffix='.gii')

    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])

    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    filename_gii_mesh_no_point = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_POINTSET',
                        load_surf_mesh, filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    filename_gii_mesh_no_face = tempfile.mktemp(suffix='.gii')
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    assert_raises_regex(ValueError, 'NIFTI_INTENT_TRIANGLE',
                        load_surf_mesh, filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
Example #15
def test_load_surf_mesh_file_gii(tmp_path):
    # Test the loader `load_surf_mesh`
    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    fd_mesh, filename_gii_mesh = tempfile.mkstemp(suffix='.gii',
                                                  dir=str(tmp_path))
    os.close(fd_mesh)
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])

    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if incorrect gii raises error
    fd_no, filename_gii_mesh_no_point = tempfile.mkstemp(suffix='.gii',
                                                         dir=str(tmp_path))
    os.close(fd_no)
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    with pytest.raises(ValueError, match='NIFTI_INTENT_POINTSET'):
        load_surf_mesh(filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    fd_face, filename_gii_mesh_no_face = tempfile.mkstemp(suffix='.gii',
                                                          dir=str(tmp_path))
    os.close(fd_face)
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    with pytest.raises(ValueError, match='NIFTI_INTENT_TRIANGLE'):
        load_surf_mesh(filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
Example #16
def writegifti(filename, polydata, refniifn):
    refnii = radiological.load(refniifn)

    points, polys = polydata_to_points_polys(polydata, True)
    scaledpoints4 = np.hstack([
        points / refnii.header.get_zooms()[0:3],
        np.ones(points.shape[0])[:, None]
    ])
    transformedpoints = refnii.affine.dot(
        scaledpoints4.transpose())[0:3, :].transpose()

    # Not setting xfm as nifti mat has already been applied - is this the right way to do it?
    giftipoints = gii.GiftiDataArray(data=transformedpoints,
                                     intent='NIFTI_INTENT_POINTSET',
                                     datatype='NIFTI_TYPE_FLOAT32',
                                     encoding='GIFTI_ENCODING_ASCII')
    giftipolys = gii.GiftiDataArray(data=polys,
                                    intent='NIFTI_INTENT_TRIANGLE',
                                    datatype='NIFTI_TYPE_INT32',
                                    encoding='GIFTI_ENCODING_ASCII')

    giftimesh = gii.GiftiImage(darrays=[giftipoints, giftipolys])
    giftimesh.to_filename(filename)
Example #17
def save_texture(filename, data):
    try:
        from nibabel import gifti
        import codecs
        darray = gifti.GiftiDataArray(data)
        gii = gifti.GiftiImage(darrays=[darray])
        f = codecs.open(filename, 'wb')  #, encoding='utf-8')
        f.write(gii.to_xml(enc='utf-8'))
        f.close()
    except:
        from soma import aims
        tex = aims.TimeTexture('FLOAT')
        tex[0].assign(data)
        aims.write(tex, filename)
Example #18
def gii_convert_to_texture(gii_f, out_gii_f=None, meta=None, verbose=0):
    """ Change intent of first data array and create a new Gifti """
    # TODO: verify that the description is good and the function is general
    orig_gii = ng.read(gii_f)

    data = orig_gii.darrays[len(orig_gii.darrays) - 1].data[0]
    darray = ng.GiftiDataArray(data=data, intent='NIFTI_INTENT_ESTIMATE')
    gii = ng.GiftiImage(darrays=[darray])

    if meta:
        gii.meta = ng.GiftiMetaData().from_dict(meta)

    out_f = out_gii_f if out_gii_f else gii_f
    ng.write(gii, out_f)

    if verbose > 0:
        print("Texture saved at: {}".format(out_f))
Example #19
def main(bids_folder='/data/ds-risk'):

    target_dir = op.join(bids_folder, 'derivatives', 'npc_com')
    if not op.exists(target_dir):
        os.makedirs(target_dir)

    subjects = get_all_subjects()

    df = []
    for subject in subjects:
        coords = get_npcr_coordinate(subject, bids_folder=bids_folder)

        df.append({'subject': subject, 'x': coords[0], 'y': coords[1]})

        surf = cortex.Surface(*cortex.db.get_surf(
            'fsaverage', 'flat', merge=False, hemisphere='right'))

        ball = surf.get_euclidean_ball(coords, radius=5).astype(float)

        darrays = [gifti.GiftiDataArray(ball)]
        image = gifti.GiftiImage(darrays=darrays)

        fn = op.join(
            target_dir,
            f'sub-{subject}_space-fsaverage_desc-npcr_hemi-R_com.gii')
        image.to_filename(fn)

        fsnative_fn = op.join(
            target_dir, f'sub-{subject}_space-fsnative-npcr_hemi-R_com.gii')
        transform_data(fn,
                       'fsaverage',
                       bids_folder,
                       target_fn=fsnative_fn,
                       target_subject=f'sub-{subject}')

        print(ball)

    df = pd.DataFrame(df)

    df.set_index('subject').to_csv(op.join(target_dir, 'coms.tsv'), sep='\t')
Example #20
def test_load_surf_data_file_nii_gii(tmp_path):
    # test loading of fake data from gifti file
    fd_gii, filename_gii = tempfile.mkstemp(suffix='.gii',
                                            dir=str(tmp_path))
    os.close(fd_gii)
    darray = gifti.GiftiDataArray(data=np.zeros((20, )))
    gii = gifti.GiftiImage(darrays=[darray])
    gifti.write(gii, filename_gii)
    assert_array_equal(load_surf_data(filename_gii), np.zeros((20, )))
    os.remove(filename_gii)

    # test loading of data from empty gifti file
    fd_empty, filename_gii_empty = tempfile.mkstemp(suffix='.gii',
                                                    dir=str(tmp_path))
    os.close(fd_empty)
    gii_empty = gifti.GiftiImage()
    gifti.write(gii_empty, filename_gii_empty)
    with pytest.raises(ValueError,
                       match='must contain at least one data array'
                       ):
        load_surf_data(filename_gii_empty)
    os.remove(filename_gii_empty)

    # test loading of fake data from nifti file
    fd_gii2, filename_nii = tempfile.mkstemp(suffix='.nii',
                                             dir=str(tmp_path))
    os.close(fd_gii2)
    fd_niigz, filename_niigz = tempfile.mkstemp(suffix='.nii.gz',
                                                dir=str(tmp_path))
    os.close(fd_niigz)
    nii = nb.Nifti1Image(np.zeros((20, )), affine=None)
    nb.save(nii, filename_nii)
    nb.save(nii, filename_niigz)
    assert_array_equal(load_surf_data(filename_nii), np.zeros((20, )))
    assert_array_equal(load_surf_data(filename_niigz), np.zeros((20, )))
    os.remove(filename_nii)
    os.remove(filename_niigz)
Example #21
def write_gifti(filename,
                arr_list,
                brain_structure,
                intent_list=None,
                color_map=None,
                meta_list=None,
                **kwargs):
    """
    Writes data to a GIFTI file

    :param filename: output filename
    :param arr_list: list of arrays to be stored
    :param brain_structure: 'CortexLeft' or 'CortexRight'
    :param intent_list: intent of each array (list of same length as arr_list)
    :param color_map: None for non-label giftis, 'default' for default qualitative colour map, dict mapping value to RGBA values otherwise
    :param meta_list: list of dictionaries with the array metadata
    :param kwargs: additional values to be stored to the meta data
    """
    logger.info('writing to %s as GIFTI' % filename)
    if intent_list is None:
        intent_list = [
            'NIFTI_INTENT_NONE' if color_map is None else 'NIFTI_INTENT_LABEL'
        ] * len(arr_list)
    if meta_list is None:
        meta_list = [{} for _ in arr_list]
    if len(intent_list) != len(arr_list):
        raise ValueError("Number of intents does not match number of arrays")

    if isinstance(brain_structure, str):
        brain_structure = BrainStructure.from_string(brain_structure,
                                                     issurface=True)
    meta_dict = brain_structure.gifti
    meta_dict.update({'Date': str(datetime.datetime.now()), 'encoding': 'XML'})
    meta_dict.update(kwargs)
    meta = gifti.GiftiMetaData.from_dict(meta_dict)

    if color_map == 'default':
        color_map = {}
    if color_map is not None:
        labels = np.unique(np.concatenate([np.unique(arr)
                                           for arr in arr_list]))
        colour_sequence = cc.glasbey

        for label in labels:
            if label not in color_map:
                color_map[label] = (str(label), next(colour_sequence))

        labeltable = gifti.GiftiLabelTable()
        for value, (text, rgba) in color_map.items():
            labeltable.labels.append(gifti.GiftiLabel(value, *rgba))
            labeltable.labels[-1].label = str(text)
    else:
        labeltable = None

    img = gifti.GiftiImage(meta=meta, labeltable=labeltable)
    for arr, intent, arr_meta in zip(arr_list, intent_list, meta_list):
        arr_meta_dict = dict(meta_dict)
        arr_meta_dict.update(arr_meta)
        img.add_gifti_data_array(
            gifti.GiftiDataArray(correct_type(arr), intent,
                                 meta=arr_meta_dict))
    for da in img.darrays:
        da.encoding = 2  # Base64Binary
    nib.save(img, filename)
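A hedged usage sketch for write_gifti; the filename, array and metadata are made up, and the vertex count is arbitrary:

import numpy as np

# One float map for the left cortex (arbitrary vertex count).
curvature = np.random.randn(32492).astype('float32')
write_gifti('lh.curv.shape.gii', [curvature], 'CortexLeft',
            intent_list=['NIFTI_INTENT_SHAPE'],
            meta_list=[{'Name': 'curvature'}])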
Example #22
        # Run the marching cube algorithm
        verts, faces, normals, values = sm.marching_cubes_lewiner(
            img.get_data(), self.inputs.level)

        # Convert vertices coordinates to image space
        # TODO: check that it is correct by plotting the mesh on the image
        x, y, z = nimg.coord_transform(
            verts[:, 0], verts[:, 1], verts[:, 2], img.affine)
        mm_verts = np.array([x, y, z]).T

        # Save the mesh as Gifti
        # FIXME: FreeView cannot open the mesh (but Anatomist does)
        gii = ng.GiftiImage(darrays=[
            ng.GiftiDataArray(mm_verts, intent='NIFTI_INTENT_POINTSET'),
            ng.GiftiDataArray(faces, intent='NIFTI_INTENT_TRIANGLE')])
        gii.meta = ng.GiftiMetaData().from_dict({
            "volume_file": self.inputs.image_file,
            "marching_cube_level": str(self.inputs.level),
            "smoothing_iterations": str(self.inputs.smoothing_iter),
            "smoothing_dt": str(self.inputs.smoothing_dt)
        })
        ng.write(gii, gii_file)
Example #23
gii_labeltb = nbg.GiftiLabelTable()

for i in range(len(region_names)):
    gii_label = nbg.GiftiLabel(
        key=i,
        alpha=1,
        red=np.random.uniform(0, 1, 1)[0],
        green=np.random.uniform(0, 1, 1)[0],
        blue=np.random.uniform(0, 1, 1)[0],
    )
    gii_label.label = region_names[i]
    gii_labeltb.labels.append(gii_label)

darrays = [
    nbg.GiftiDataArray(region_map_lores.astype("int32"),
                       intent="NIFTI_INTENT_LABEL",
                       datatype=8)
]
gii_image = nbg.GiftiImage(darrays=darrays, labeltable=gii_labeltb)
nbg.giftiio.write(
    gii_image, BIDS_anat_folder + "/sub-" + participant_label +
    "_space-individual_dparc.label.gii")

# write cortical surface (i.e. source space) to file
cort_surf_path = tvb_output + "/sub-" + participant_label + "_Cortex/"
if not os.path.exists(cort_surf_path):
    os.makedirs(cort_surf_path)

# surface vertices are in ras-tkr coordinates used by freesurfer
# for them to align with parc_image, use affine transform to bring them into ras-scanner
p = Popen(('mri_info --tkr2scanner ' + recon_all_dir + "/" + recon_all_name +