import os
import subprocess as sp
from tempfile import NamedTemporaryFile

import nibabel
import numpy as np
from nibabel import gifti


def mri_surf2surf(data, source_subj, target_subj, hemi, subjects_dir=None):
    """Use FreeSurfer's mri_surf2surf to transfer vertex data between two
    FreeSurfer subjects.

    Parameters
    ----------
    data : ndarray, shape=(n_imgs, n_verts)
        Data arrays representing vertex data.
    source_subj : str
        FreeSurfer subject name of the source subject.
    target_subj : str
        FreeSurfer subject name of the target subject.
    hemi : str in ("lh", "rh")
        String indicating the hemisphere.

    Notes
    -----
    Requires mri_surf2surf to be on the path, i.e. an active FreeSurfer
    environment.
    """
    # Wrap the input arrays in a GIFTI image and write it to a temporary file.
    data_arrays = [gifti.GiftiDataArray(d) for d in data]
    gifti_image = gifti.GiftiImage(darrays=data_arrays)

    tf_in = NamedTemporaryFile(suffix=".gii")
    nibabel.save(gifti_image, tf_in.name)

    tf_out = NamedTemporaryFile(suffix='.gii')
    cmd = _mri_surf2surf_command(source_subj, target_subj,
                                 tf_in.name, tf_out.name, hemi)
    if subjects_dir is not None:
        env = os.environ.copy()
        env['SUBJECTS_DIR'] = subjects_dir
    else:
        env = None

    print('Calling:')
    print(' '.join(cmd))
    p = sp.Popen(cmd, env=env)
    exit_code = p.wait()
    if exit_code != 0:
        if exit_code == 255:
            raise Exception(
                ("Missing file (see above). "
                 "If lh.sphere.reg is missing,\n"
                 "you likely need to run the 3rd "
                 "stage of freesurfer autorecon\n"
                 "(sphere registration) for this subject:\n"
                 ">>> cortex.freesurfer.autorecon('{fs_subject}', type='3')"
                 ).format(fs_subject=source_subj))
        # TODO: consider raising subprocess.CalledProcessError here instead.
        raise Exception(("Exit code {exit_code} means that "
                         "mri_surf2surf failed").format(exit_code=exit_code))

    tf_in.close()
    # Read the resampled vertex data back from the output file.
    output_img = nibabel.load(tf_out.name)
    output_data = np.array([da.data for da in output_img.darrays])
    tf_out.close()
    return output_data
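# Usage sketch (hypothetical): resample one per-vertex map from a processed
# subject onto fsaverage. The subject name, subjects_dir path, and vertex
# count below are illustrative assumptions, not values from the original code.
def _example_mri_surf2surf_usage():
    example_data = np.random.randn(1, 163842)  # (n_imgs, n_verts) for 'sub-01'
    resampled = mri_surf2surf(example_data, 'sub-01', 'fsaverage', 'lh',
                              subjects_dir='/path/to/subjects')
    return resampled  # shape (n_imgs, n_verts_on_fsaverage)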
def write(self, gifti_filename, scalar_arr=None, **kwargs):
    """
    Writes the surface to a surface GIFTI file.

    :param gifti_filename: output filename
    :param scalar_arr: optionally include a scalar array with the same length
        as the number of vertices (as expected by FSL's probtrackX)
    :param kwargs: any keywords are added to the meta information in the GIFTI file
    """
    from . import cortical_mesh
    use_kwargs = {
        'Date': str(datetime.datetime.now()),
        'encoding': 'XML',
        'GeometricType': 'Anatomical'
    }
    use_kwargs.update(cortical_mesh.BrainStructure('Other').gifti)
    use_kwargs.update(kwargs)
    meta = gifti.GiftiMetaData.from_dict(use_kwargs)

    img = gifti.GiftiImage(meta=meta)
    # Store the vertex coordinates and triangle indices (note the transpose)
    # with the appropriate intents and dtypes.
    for arr, intent, dtype in zip([self.vertices, self.faces],
                                  ['pointset', 'triangle'],
                                  ['f4', 'i4']):
        img.add_gifti_data_array(
            gifti.GiftiDataArray(arr.T.astype(dtype), intent,
                                 meta=meta.metadata))
    if scalar_arr is not None:
        img.add_gifti_data_array(
            gifti.GiftiDataArray(scalar_arr.astype('f4'), intent='shape',
                                 meta=meta.metadata))
    for da in img.darrays:
        da.encoding = 2  # Base64Binary
    nib.save(img, gifti_filename)
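# Read-back sketch with plain nibabel (the filename is hypothetical, assumed
# to have been produced by write() above); prints each data array's intent
# code, shape, and dtype. Note that write() stores the pointset and triangle
# arrays transposed.
def _example_inspect_written_gifti(gifti_filename='example.surf.gii'):
    import nibabel as nib
    img = nib.load(gifti_filename)
    for da in img.darrays:
        print(da.intent, da.data.shape, da.data.dtype)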
def test_load_surf_mesh_file_gii(tmp_path):
    # Test the loader `load_surf_mesh`

    # If nibabel is of an older version we skip the test, as nibabel does not
    # support the intent argument and intent codes are not handled properly
    # with older versions
    if not LooseVersion(nb.__version__) >= LooseVersion('2.1.0'):
        pytest.skip('Nibabel version too old to handle intent codes')

    mesh = generate_surf()

    # test if correct gii is loaded into correct list
    fd_mesh, filename_gii_mesh = tempfile.mkstemp(suffix='.gii',
                                                  dir=str(tmp_path))
    os.close(fd_mesh)
    coord_array = gifti.GiftiDataArray(data=mesh[0],
                                       intent=nb.nifti1.intent_codes[
                                           'NIFTI_INTENT_POINTSET'])
    face_array = gifti.GiftiDataArray(data=mesh[1],
                                      intent=nb.nifti1.intent_codes[
                                          'NIFTI_INTENT_TRIANGLE'])
    gii = gifti.GiftiImage(darrays=[coord_array, face_array])
    gifti.write(gii, filename_gii_mesh)
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[0], mesh[0])
    assert_array_equal(load_surf_mesh(filename_gii_mesh)[1], mesh[1])
    os.remove(filename_gii_mesh)

    # test if a gii without a pointset array raises an error
    fd_no, filename_gii_mesh_no_point = tempfile.mkstemp(suffix='.gii',
                                                         dir=str(tmp_path))
    os.close(fd_no)
    gifti.write(gifti.GiftiImage(darrays=[face_array, face_array]),
                filename_gii_mesh_no_point)
    with pytest.raises(ValueError, match='NIFTI_INTENT_POINTSET'):
        load_surf_mesh(filename_gii_mesh_no_point)
    os.remove(filename_gii_mesh_no_point)

    # test if a gii without a triangle array raises an error
    fd_face, filename_gii_mesh_no_face = tempfile.mkstemp(suffix='.gii',
                                                          dir=str(tmp_path))
    os.close(fd_face)
    gifti.write(gifti.GiftiImage(darrays=[coord_array, coord_array]),
                filename_gii_mesh_no_face)
    with pytest.raises(ValueError, match='NIFTI_INTENT_TRIANGLE'):
        load_surf_mesh(filename_gii_mesh_no_face)
    os.remove(filename_gii_mesh_no_face)
def save_texture(filename, data):
    """Write a data array to a GIFTI texture file."""
    from nibabel import gifti
    import codecs
    darray = gifti.GiftiDataArray(data)
    gii = gifti.GiftiImage(darrays=[darray])
    # Serialize the image to XML ourselves and write it out as UTF-8 bytes.
    f = codecs.open(filename, 'wb')
    f.write(gii.to_xml(enc='utf-8'))
    f.close()
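# Equivalent sketch that lets nibabel handle serialization itself instead of
# the manual to_xml()/codecs write above (the function name is mine; behavior
# is assumed equivalent apart from the on-disk encoding nibabel chooses).
def save_texture_nib(filename, data):
    import numpy as np
    import nibabel as nib
    from nibabel import gifti
    darray = gifti.GiftiDataArray(np.asarray(data, dtype=np.float32))
    nib.save(gifti.GiftiImage(darrays=[darray]), filename)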
import numpy as np
from nibabel import gifti as gii

# `radiological` and `polydata_to_points_polys` are assumed to be provided by
# the surrounding module.


def writegifti(filename, polydata, refniifn):
    refnii = radiological.load(refniifn)
    points, polys = polydata_to_points_polys(polydata, True)

    # Divide out the voxel zooms, append a homogeneous coordinate, and map the
    # points through the reference image's affine to get world coordinates.
    scaledpoints4 = np.hstack([
        points / refnii.header.get_zooms()[0:3],
        np.ones(points.shape[0])[:, None]
    ])
    transformedpoints = refnii.affine.dot(
        scaledpoints4.transpose())[0:3, :].transpose()

    # No coordinate transform is stored in the GIFTI, since the NIfTI affine
    # has already been applied to the points above. Cast the arrays so their
    # dtypes match the declared GIFTI datatypes.
    giftipoints = gii.GiftiDataArray(data=transformedpoints.astype(np.float32),
                                     intent='NIFTI_INTENT_POINTSET',
                                     datatype='NIFTI_TYPE_FLOAT32',
                                     encoding='GIFTI_ENCODING_ASCII')
    giftipolys = gii.GiftiDataArray(data=polys.astype(np.int32),
                                    intent='NIFTI_INTENT_TRIANGLE',
                                    datatype='NIFTI_TYPE_INT32',
                                    encoding='GIFTI_ENCODING_ASCII')
    giftimesh = gii.GiftiImage(darrays=[giftipoints, giftipolys])
    giftimesh.to_filename(filename)
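# The voxel-zoom division plus homogeneous affine multiplication above can
# also be written with nibabel.affines.apply_affine; a sketch under the same
# scaling assumption as writegifti (the helper name is mine):
def scaled_points_to_world(points, refnii):
    import numpy as np
    from nibabel.affines import apply_affine
    zooms = np.asarray(refnii.header.get_zooms()[:3])
    return apply_affine(refnii.affine, points / zooms)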