def project_fmri(input_mesh, data_file, output_tex_file,
                 output_kernels_file=None, data_resolution=None,
                 geod_decay=5., norm_decay=2., kernel_size=7,
                 tex_bin_threshold=None):
    """Project volumic fMRI data onto a cortical surface mesh.

    Projection kernels are created first (stored at `output_kernels_file`
    when given, otherwise in a temporary file removed afterwards), then
    applied to produce the output texture.

    Parameters
    ----------
    input_mesh : str
        Path to the input surface mesh file.
    data_file : str
        Path to the volumic fMRI data file.
    output_tex_file : str
        Path of the output texture file receiving projected data.
    output_kernels_file : str, optional
        Where to save projection kernels. If None, a temporary file is
        used and deleted after projection.
    data_resolution : optional
        Spatial resolution of the data; read from `data_file` when None.
    geod_decay : float
        Geodesic decay of the projection kernels (mm).
    norm_decay : float
        Decay along the surface normal (mm).
    kernel_size : int
        Kernel size (voxels).
    tex_bin_threshold : optional
        Threshold used to binarize the output texture (None = no
        binarization).
    """
    if output_kernels_file is None:
        # Kernels are only an intermediate product here: write them to a
        # temp location and clean up at the end.
        tmp_dir = tempfile.mkdtemp(prefix='pyhrf_surf_proj',
                                   dir=pyhrf.cfg['global']['tmp_path'])
        kernels_file = op.join(tmp_dir,
                               add_suffix(op.basename(data_file),
                                          '_kernels'))
        tmp_kernels_file = True
    else:
        kernels_file = output_kernels_file
        tmp_kernels_file = False

    if data_resolution is not None:
        resolution = data_resolution
    else:
        resolution = read_spatial_resolution(data_file)

    # Use the logging module with lazy %-style args instead of the
    # deprecated pyhrf.verbose API, consistently with the other
    # projection helpers in this file.
    logger.info('Data resolution: %s', resolution)
    logger.info('Projection parameters:')
    logger.info(' - geodesic decay: %f mm', geod_decay)
    logger.info(' - normal decay: %f mm', norm_decay)
    logger.info(' - kernel size: %f voxels', kernel_size)

    create_projection_kernels(input_mesh, kernels_file, resolution,
                              geod_decay, norm_decay, kernel_size)
    project_fmri_from_kernels(input_mesh, kernels_file, data_file,
                              output_tex_file, tex_bin_threshold)

    if tmp_kernels_file:
        os.remove(kernels_file)
def mesh_contour_with_files(input_mesh, input_labels, output_mesh=None,
                            output_labels=None):
    """Compute the contour of labeled regions on a mesh and save it.

    TODO: use nibabel here

    Parameters
    ----------
    input_mesh : str
        Path to the input gifti mesh (pointset + triangles arrays).
    input_labels : str
        Path to the gifti texture holding one integer label per node.
    output_mesh : str, optional
        Path of the saved contour mesh. When None, a non-existing file
        name derived from `input_mesh` (suffix '_contour') is used.
    output_labels : str, optional
        Currently unused -- kept for interface compatibility.
        TODO(review): save the contour labels to this file.
    """
    # Dead commented-out gifti imports removed; loadImage & co are
    # assumed to be provided at module level -- TODO confirm.
    labels = loadImage(input_labels).arrays[0].data.astype(int)
    cor, triangles = loadImage(input_mesh).arrays
    contour_cor, contour_tri = mesh_contour(cor.data,
                                            triangles.data.astype(int),
                                            labels)
    k = GiftiImage_fromarray(contour_cor)
    k.arrays[0].intentString = "NIFTI_INTENT_POINTSET"
    k.addDataArray_fromarray(contour_tri,
                             GiftiIntentCode.NIFTI_INTENT_TRIANGLE)
    # Force ASCII encoding so the saved gifti file is human-readable.
    for a in k.arrays:
        a.encoding = GiftiEncoding.GIFTI_ENCODING_ASCII
    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, '_contour'))
    logger.info('saving to %s', output_mesh)
    k.save(output_mesh)
def get_unique_item_id(self, name, suffix_nb=0):
    # Build a candidate item id from `name` by appending "(suffix_nb)"
    # (no suffix when suffix_nb == 0).
    # NOTE(review): this version only prints the candidate and the
    # current item list; it never returns `candidate` nor checks it
    # against existing ids -- looks like unfinished debug code, despite
    # the "get_unique_item_id" name. TODO confirm intended behavior.
    print "name:", name
    if suffix_nb != 0:
        suffix = "(%d)" % suffix_nb
    else:
        suffix = ""
    candidate = add_suffix(name, suffix)
    print "candidate:", candidate
    # Dump the ids currently present in the UI list widget (debug).
    print [str(self.ui.item_list.item(i))
           for i in range(self.ui.item_list.count())]
def get_unique_item_id(self, name, suffix_nb=0):
    # Build a candidate item id from `name` by appending '(suffix_nb)'
    # (no suffix when suffix_nb == 0).
    # NOTE(review): like its sibling variant, this prints the candidate
    # and the current item list but never returns `candidate` or
    # verifies uniqueness -- appears to be unfinished debug code.
    print 'name:', name
    if suffix_nb != 0:
        suffix = '(%d)' %suffix_nb
    else:
        suffix = ''
    candidate = add_suffix(name, suffix)
    print 'candidate:', candidate
    # Dump the ids currently present in the UI list widget (debug).
    print [str(self.ui.item_list.item(i)) \
           for i in range(self.ui.item_list.count())]
def test_split(self):
    """Round-trip test: split an xndarray along the 'condition' axis,
    save each sub-cuboid to nifti, reload and compare for equality."""
    # pyhrf.verbose.set_verbosity(0)
    pyhrf.logger.setLevel(logging.WARNING)
    # 4D cuboid: 2 conditions x 4x4x4 spatial volume.
    sh = (2, 4, 4, 4)
    c = xndarray(np.arange(np.prod(sh)).reshape(sh),
                 ['condition'] + MRI3Daxes,
                 {'condition': ['audio', 'video']})
    if debug:
        print 'Original cub:'
        print c.descrip()
    fn = op.join(self.tmp_dir, 'cub.nii')
    if debug:
        print 'Save and load original cub'
    # Save/load once so `c` carries nifti meta data.
    c.save(fn)
    c = xndarray.load(fn)
    fn = op.join(self.tmp_dir, 'cub2.nii')
    if debug:
        print 'Save and load new cub with meta data from original cuboid'
    # 3D cuboid reusing the meta data loaded above.
    sh = (4, 4, 4)
    c2 = xndarray(np.arange(np.prod(sh)).reshape(sh), MRI3Daxes,
                  meta_data=c.meta_data)
    c2.save(fn)
    c2 = xndarray.load(fn)
    fns = []
    sub_cuboids = []
    if debug:
        print 'Split and save sub cuboids'
    # One sub-cuboid (and one file) per condition value.
    for dvalue, sub_c in c.split('condition').iteritems():
        fn = op.join(self.tmp_dir,
                     add_suffix('sub_c.nii', '_%s' % str(dvalue)))
        if debug and dvalue == 'audio':
            print 'fn_out:', fn
            print 'sub_c:'
            print sub_c.descrip()
        sub_cuboids.append(sub_c)
        sub_c.save(fn)
        fns.append(fn)
    if debug:
        print ''
        print 'Load sub c again ...'
    # Each reloaded sub-cuboid must equal its in-memory counterpart.
    for fn, sub_c in zip(fns, sub_cuboids):
        if debug:
            print 'fn:', fn
        c_loaded = xndarray.load(fn)
        if debug:
            print 'c_loaded:'
            print c_loaded.descrip()
        self.assertEqual(c_loaded, sub_c)
def test_split(self):
    """Round-trip test: split an xndarray along the 'condition' axis,
    save each sub-cuboid to nifti, reload and compare for equality."""
    # 4D cuboid: 2 conditions x 4x4x4 spatial volume.
    sh = (2, 4, 4, 4)
    c = xndarray(
        np.arange(np.prod(sh)).reshape(sh),
        ['condition'] + MRI3Daxes,
        {'condition': ['audio', 'video']})
    if debug:
        print 'Original cub:'
        print c.descrip()
    fn = op.join(self.tmp_dir, 'cub.nii')
    if debug:
        print 'Save and load original cub'
    # Save/load once so `c` carries nifti meta data.
    c.save(fn)
    c = xndarray.load(fn)
    fn = op.join(self.tmp_dir, 'cub2.nii')
    if debug:
        print 'Save and load new cub with meta data from original cuboid'
    # 3D cuboid reusing the meta data loaded above.
    sh = (4, 4, 4)
    c2 = xndarray(np.arange(np.prod(sh)).reshape(sh), MRI3Daxes,
                  meta_data=c.meta_data)
    c2.save(fn)
    c2 = xndarray.load(fn)
    fns = []
    sub_cuboids = []
    if debug:
        print 'Split and save sub cuboids'
    # One sub-cuboid (and one file) per condition value.
    for dvalue, sub_c in c.split('condition').iteritems():
        fn = op.join(self.tmp_dir,
                     add_suffix('sub_c.nii', '_%s' % str(dvalue)))
        if debug and dvalue == 'audio':
            print 'fn_out:', fn
            print 'sub_c:'
            print sub_c.descrip()
        sub_cuboids.append(sub_c)
        sub_c.save(fn)
        fns.append(fn)
    if debug:
        print ''
        print 'Load sub c again ...'
    # Each reloaded sub-cuboid must equal its in-memory counterpart.
    for fn, sub_c in zip(fns, sub_cuboids):
        if debug:
            print 'fn:', fn
        c_loaded = xndarray.load(fn)
        if debug:
            print 'c_loaded:'
            print c_loaded.descrip()
        self.assertEqual(c_loaded, sub_c)
def test_split(self):
    """Round-trip test: split an xndarray along the 'condition' axis,
    save each sub-cuboid to nifti, reload and compare for equality."""
    # pyhrf.verbose.set_verbosity(4)
    # 4D cuboid: 2 conditions x 4x4x4 spatial volume.
    sh = (2, 4, 4, 4)
    c = xndarray(np.arange(np.prod(sh)).reshape(sh),
                 ["condition"] + MRI3Daxes,
                 {"condition": ["audio", "video"]})
    if debug:
        print "Original cub:"
        print c.descrip()
    fn = op.join(self.tmp_dir, "cub.nii")
    if debug:
        print "Save and load original cub"
    # Save/load once so `c` carries nifti meta data.
    c.save(fn)
    c = xndarray.load(fn)
    fn = op.join(self.tmp_dir, "cub2.nii")
    if debug:
        print "Save and load new cub with meta data from original cuboid"
    # 3D cuboid reusing the meta data loaded above.
    sh = (4, 4, 4)
    c2 = xndarray(np.arange(np.prod(sh)).reshape(sh), MRI3Daxes,
                  meta_data=c.meta_data)
    c2.save(fn)
    c2 = xndarray.load(fn)
    fns = []
    sub_cuboids = []
    if debug:
        print "Split and save sub cuboids"
    # One sub-cuboid (and one file) per condition value.
    for dvalue, sub_c in c.split("condition").iteritems():
        fn = op.join(self.tmp_dir,
                     add_suffix("sub_c.nii", "_%s" % str(dvalue)))
        if debug and dvalue == "audio":
            print "fn_out:", fn
            print "sub_c:"
            print sub_c.descrip()
        sub_cuboids.append(sub_c)
        sub_c.save(fn)
        fns.append(fn)
    if debug:
        print ""
        print "Load sub c again ..."
    # Each reloaded sub-cuboid must equal its in-memory counterpart.
    for fn, sub_c in zip(fns, sub_cuboids):
        if debug:
            print "fn:", fn
        c_loaded = xndarray.load(fn)
        if debug:
            print "c_loaded:"
            print c_loaded.descrip()
        self.assertEqual(c_loaded, sub_c)
def extract_sub_mesh_with_files(input_mesh, center_node, radius,
                                output_mesh=None):
    """Extract the sub-mesh within `radius` of `center_node` and save it.

    Parameters
    ----------
    input_mesh : str
        Path to the input gifti mesh.
    center_node : int
        Index of the node at the center of the extraction.
    radius :
        Extraction radius around `center_node` (semantics defined by
        `extract_sub_mesh`).
    output_mesh : str, optional
        Path of the saved sub-mesh. When None, a non-existing file name
        derived from `input_mesh` (suffix '_sub') is used.

    Returns
    -------
    tuple
        (sub_cor, sub_tri, coord_sys): coordinates and triangles of the
        extracted sub-mesh, plus the coordinate system of the input mesh.
    """
    from nibabel import gifti
    from nibabel.gifti import GiftiImage, GiftiDataArray
    from pyhrf.tools.io import read_mesh

    cor, tri, coord_sys = read_mesh(input_mesh)
    sub_cor, sub_tri = extract_sub_mesh(cor, tri, center_node, radius)

    # Build the output gifti image: one pointset array + one triangle array.
    nimg = GiftiImage()
    nimg.add_gifti_data_array(
        GiftiDataArray.from_array(sub_cor, 'NIFTI_INTENT_POINTSET'))
    nimg.add_gifti_data_array(
        GiftiDataArray.from_array(sub_tri, 'NIFTI_INTENT_TRIANGLE'))

    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, '_sub'))
    # Lazy logging instead of the deprecated pyhrf.verbose API,
    # consistently with the other mesh helpers in this file.
    logger.info('Saving extracted mesh to %s', output_mesh)
    gifti.write(nimg, output_mesh)
    return sub_cor, sub_tri, coord_sys
def mesh_contour_with_files(input_mesh, input_labels, output_mesh=None,
                            output_labels=None):
    """Compute the contour of labeled regions on a mesh and save it.

    TODO: use nibabel here

    Parameters
    ----------
    input_mesh : str
        Path to the input gifti mesh (pointset + triangles arrays).
    input_labels : str
        Path to the gifti texture holding one integer label per node.
    output_mesh : str, optional
        Path of the saved contour mesh. When None, a non-existing file
        name derived from `input_mesh` (suffix "_contour") is used.
    output_labels : str, optional
        Currently unused -- kept for interface compatibility.
        TODO(review): save the contour labels to this file.
    """
    # Dead commented-out gifti imports removed; loadImage & co are
    # assumed to be provided at module level -- TODO confirm.
    labels = loadImage(input_labels).arrays[0].data.astype(int)
    cor, triangles = loadImage(input_mesh).arrays
    contour_cor, contour_tri = mesh_contour(cor.data,
                                            triangles.data.astype(int),
                                            labels)
    k = GiftiImage_fromarray(contour_cor)
    k.arrays[0].intentString = "NIFTI_INTENT_POINTSET"
    k.addDataArray_fromarray(contour_tri,
                             GiftiIntentCode.NIFTI_INTENT_TRIANGLE)
    # Force ASCII encoding so the saved gifti file is human-readable.
    for a in k.arrays:
        a.encoding = GiftiEncoding.GIFTI_ENCODING_ASCII
    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, "_contour"))
    logger.info("saving to %s", output_mesh)
    k.save(output_mesh)
def mesh_contour_with_files(input_mesh, input_labels, output_mesh=None,
                            output_labels=None):
    """Compute the contour of labeled regions on a mesh and save it.

    Parameters
    ----------
    input_mesh : str
        Path to the input gifti mesh (pointset + triangles arrays).
    input_labels : str
        Path to the gifti texture holding one integer label per node.
    output_mesh : str, optional
        Path of the saved contour mesh. When None, a non-existing file
        name derived from `input_mesh` (suffix '_contour') is used.
    output_labels : str, optional
        Currently unused -- kept for interface compatibility.
        TODO(review): save the contour labels to this file.
    """
    from gifti import loadImage, saveImage, GiftiDataArray, GiftiImage
    from gifti import GiftiImage_fromarray, GiftiImage_fromTriangles
    from gifti import GiftiIntentCode, GiftiEncoding
    labels = loadImage(input_labels).arrays[0].data.astype(int)
    cor, triangles = loadImage(input_mesh).arrays
    contour_cor, contour_tri = mesh_contour(cor.data,
                                            triangles.data.astype(int),
                                            labels)
    k = GiftiImage_fromarray(contour_cor)
    k.arrays[0].intentString = "NIFTI_INTENT_POINTSET"
    k.addDataArray_fromarray(contour_tri,
                             GiftiIntentCode.NIFTI_INTENT_TRIANGLE)
    # Force ASCII encoding so the saved gifti file is human-readable.
    for a in k.arrays:
        a.encoding = GiftiEncoding.GIFTI_ENCODING_ASCII
    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, '_contour'))
    # Lazy logging instead of the deprecated pyhrf.verbose API,
    # consistently with the other variants of this helper.
    logger.info('saving to %s', output_mesh)
    k.save(output_mesh)
def project_fmri(input_mesh, data_file, output_tex_file,
                 output_kernels_file=None, data_resolution=None,
                 geod_decay=5., norm_decay=2., kernel_size=7,
                 tex_bin_threshold=None):
    """Project volumic fMRI data onto a cortical surface mesh.

    Projection kernels are first built (stored at `output_kernels_file`
    when given, otherwise in a temporary file that is removed after
    use), then applied to produce `output_tex_file`.
    """
    # Track whether the kernels file is a throw-away temporary.
    use_tmp_kernels = output_kernels_file is None
    if use_tmp_kernels:
        tmp_dir = tempfile.mkdtemp(prefix='pyhrf_surf_proj',
                                   dir=pyhrf.cfg['global']['tmp_path'])
        kernels_file = op.join(tmp_dir,
                               add_suffix(op.basename(data_file),
                                          '_kernels'))
    else:
        kernels_file = output_kernels_file

    if data_resolution is None:
        resolution = read_spatial_resolution(data_file)
    else:
        resolution = data_resolution

    logger.info('Data resolution: %s', resolution)
    logger.info('Projection parameters:')
    logger.info(' - geodesic decay: %f mm', geod_decay)
    logger.info(' - normal decay: %f mm', norm_decay)
    logger.info(' - kernel size: %f voxels', kernel_size)

    create_projection_kernels(input_mesh, kernels_file, resolution,
                              geod_decay, norm_decay, kernel_size)
    project_fmri_from_kernels(input_mesh, kernels_file, data_file,
                              output_tex_file, tex_bin_threshold)

    if use_tmp_kernels:
        os.remove(kernels_file)
def extract_sub_mesh_with_files(input_mesh, center_node, radius,
                                output_mesh=None):
    """Extract the part of `input_mesh` within `radius` of `center_node`,
    save it as a gifti mesh and return it.

    Returns
    -------
    tuple
        (sub_cor, sub_tri, coord_sys): coordinates and triangles of the
        extracted sub-mesh, plus the coordinate system of the input mesh.
    """
    from nibabel import gifti
    from nibabel.gifti import GiftiImage, GiftiDataArray
    from pyhrf.tools._io import read_mesh

    cor, tri, coord_sys = read_mesh(input_mesh)
    sub_cor, sub_tri = extract_sub_mesh(cor, tri, center_node, radius)

    # Assemble the output image: one pointset array + one triangle array.
    out_img = GiftiImage()
    for arr, intent in ((sub_cor, 'NIFTI_INTENT_POINTSET'),
                        (sub_tri, 'NIFTI_INTENT_TRIANGLE')):
        out_img.add_gifti_data_array(GiftiDataArray.from_array(arr, intent))

    if output_mesh is None:
        output_mesh = non_existent_file(add_suffix(input_mesh, '_sub'))
    logger.info('Saving extracted mesh to %s', output_mesh)
    gifti.write(out_img, output_mesh)
    return sub_cor, sub_tri, coord_sys
def project_fmri(
    input_mesh,
    data_file,
    output_tex_file,
    output_kernels_file=None,
    data_resolution=None,
    geod_decay=5.0,
    norm_decay=2.0,
    kernel_size=7,
    tex_bin_threshold=None,
):
    """Project volumic fMRI data (`data_file`) onto the surface mesh
    `input_mesh`, writing the projected texture to `output_tex_file`.

    Projection kernels are created first (stored at
    `output_kernels_file` when given, otherwise in a temporary file
    removed after the projection). `geod_decay` / `norm_decay` (mm) and
    `kernel_size` (voxels) parameterize the kernels;
    `tex_bin_threshold`, when not None, is forwarded to binarize the
    output texture. `data_resolution` overrides the resolution read
    from `data_file`.
    """
    if output_kernels_file is None:
        # Kernels are only an intermediate product: put them in a temp
        # file and remove them at the end.
        tmp_dir = tempfile.mkdtemp(prefix="pyhrf_surf_proj", dir=pyhrf.cfg["global"]["tmp_path"])
        kernels_file = op.join(tmp_dir, add_suffix(op.basename(data_file), "_kernels"))
        tmp_kernels_file = True
    else:
        kernels_file = output_kernels_file
        tmp_kernels_file = False
    if data_resolution is not None:
        resolution = data_resolution
    else:
        resolution = read_spatial_resolution(data_file)
    logger.info("Data resolution: %s", resolution)
    logger.info("Projection parameters:")
    logger.info(" - geodesic decay: %f mm", geod_decay)
    logger.info(" - normal decay: %f mm", norm_decay)
    logger.info(" - kernel size: %f voxels", kernel_size)
    create_projection_kernels(input_mesh, kernels_file, resolution, geod_decay, norm_decay, kernel_size)
    project_fmri_from_kernels(input_mesh, kernels_file, data_file, output_tex_file, tex_bin_threshold)
    # Clean up the temporary kernels file (its parent temp dir is left
    # behind -- NOTE(review): consider removing tmp_dir as well).
    if tmp_kernels_file:
        os.remove(kernels_file)
def make_outfile(fn, path, pre='', suf=''):
    """Build an output file name located in `path`.

    `fn` is joined to `path`, then `suf` is inserted before the file
    extension and `pre` is prepended to the base name. Returns None when
    either `fn` or `path` is None.
    """
    if fn is None:
        return None
    if path is None:
        return None
    return add_prefix(add_suffix(op.join(path, fn), suf), pre)