def estimate_structure():
    """
    CLI for estimating PVs from a single surface
    """

    parser = CommonParser(
        'ref', 'struct2ref', 'flirt', 'struct', 'super',
        'out', 'surf', 'coords',
        description="Estimate PVs for a structure defined by a single surface.")
    kwargs = vars(parser.parse_args())

    ext = '.nii.gz'
    if not kwargs.get('out'):
        namebase = op.splitext(utils._splitExts(kwargs['ref'])[0])[0]
        sname = op.splitext(utils._splitExts(kwargs['surf'])[0])[0]
        outdir = op.dirname(kwargs['ref'])
        kwargs['out'] = op.join(outdir, '%s_%s_pvs%s' % (namebase, sname, ext))
    else:
        if not kwargs['out'].endswith(ext):
            kwargs['out'] += ext

    # Estimate
    PVs = pvestimation.structure(**kwargs)

    # Output
    print('Saving output at', kwargs['out'])
    refSpace = ImageSpace(kwargs['ref'])
    refSpace.save_image(PVs, kwargs['out'])
def cortex(ref, struct2ref, **kwargs):
    """
    Estimate PVs for L/R cortex. All arguments are kwargs. To estimate for
    a single hemisphere, provide only surfaces for that side.

    Required args:
        ref (str/regtricks ImageSpace): voxel grid in which to estimate PVs.
        struct2ref (str/np.array/rt.Registration): registration between space
            of surface and reference (see -flirt and -struct). Use 'I' for identity.
        fsdir (str): path to a FreeSurfer subject directory.
        LWS/LPS/RWS/RPS (str): individual paths to the surfaces,
            eg LWS = Left White surface, RPS = Right Pial surface.

    Optional args:
        flirt (bool): denoting struct2ref is a FLIRT transform; if so, set struct.
        struct (str): path to structural image from which surfaces were derived.
        cores (int): number of cores to use, default 8.
        supersample (int/array): single or 3 values, supersampling factor.

    Returns:
        (np.array) 4D array, size equal to the reference image, with the PVs
            arranged GM/WM/non-brain in the 4th dim.
    """

    if not any([
            kwargs.get('fsdir') is not None,
            any([kwargs.get(s) is not None
                 for s in ['LWS', 'LPS', 'RWS', 'RPS']])
    ]):
        raise RuntimeError("Either a fsdir or paths to LWS/LPS etc "
                           "must be given.")

    hemispheres = utils.load_surfs_to_hemispheres(**kwargs)

    # Either create local copy of ImageSpace object or init from path
    if isinstance(ref, ImageSpace):
        ref_space = copy.deepcopy(ref)
    else:
        ref_space = ImageSpace(ref)

    # Set supersampler and estimate.
    if kwargs.get('supersample') is None:
        supersampler = np.maximum(
            np.floor(ref_space.vox_size.round(1) / 0.75), 1).astype(np.int32)
    else:
        supersampler = kwargs.get('supersample') * np.ones(3)

    pvs = estimators._cortex(hemispheres, ref_space, struct2ref, supersampler,
                             kwargs['cores'], bool(kwargs.get('ones')))

    return pvs
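# Illustrative usage sketch (not part of the module API): estimating cortical
# PVs from a FreeSurfer subject directory with an identity registration. All
# paths are hypothetical placeholders; the identity matrix is one of the
# struct2ref input types documented in the docstring above.
def _example_cortex_usage():
    pvs = cortex(ref='func_ref.nii.gz',      # reference voxel grid
                 struct2ref=np.eye(4),       # identity registration
                 fsdir='/data/sub-01/fs',    # FreeSurfer subject directory
                 cores=8)
    # pvs is 4D: GM, WM and non-brain stacked in the last dimension
    return pvs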
def prepare_projector():
    """
    CLI for making a Projector
    """

    parser = CommonParser(
        'ref', 'struct2ref', 'flirt', 'struct', 'fsdir',
        'LPS', 'LWS', 'RPS', 'RWS', 'cores', 'ones', 'out', 'super',
        description=(
            "Prepare a projector for a reference voxel grid and set "
            "of surfaces, and save in HDF5 format. This is a pre-processing "
            "step for performing surface-based analysis of volumetric data."))
    args = parser.parse_args()

    if args.flirt:
        struct2ref = rt.Registration.from_flirt(args.struct2ref, args.struct,
                                                args.ref).src2ref
    elif args.struct2ref == "I":
        struct2ref = rt.Registration.identity().src2ref
    else:
        struct2ref = rt.Registration(args.struct2ref).src2ref

    # Set up the hemispheres, reference ImageSpace, and prepare projector.
    spc = ImageSpace(args.ref)
    hemispheres = utils.load_surfs_to_hemispheres(**vars(args))
    hemispheres = [h.transform(struct2ref) for h in hemispheres]
    proj = projection.Projector(hemispheres, spc, args.super, args.cores,
                                args.ones)

    # Add default .h5 extension if needed, make outdir, save.
    outdir, outname = op.split(args.out)
    outbase, outext = op.splitext(outname)
    if not outext:
        outext = '.h5'
    if outdir:
        os.makedirs(outdir, exist_ok=True)
    out = op.join(outdir, outbase + outext)
    proj.save(out)
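# Illustrative sketch (not part of the module API): the programmatic
# equivalent of the CLI above, assuming surfaces are already registered to
# the reference world space so no transform is applied. Paths are
# hypothetical placeholders, and passing None for the supersampling factor
# (mirroring the CLI default) is an assumption.
def _example_prepare_projector():
    spc = ImageSpace('func_ref.nii.gz')
    hemis = utils.load_surfs_to_hemispheres(fsdir='/data/sub-01/fs')
    proj = projection.Projector(hemis, spc, None, 8, False)
    proj.save('sub-01_projector.h5')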
def estimate_cortex():
    """
    CLI for estimating PVs from cortex (either L, R, or both)
    """

    parser = CommonParser(
        'ref', 'struct2ref', 'fsdir', 'LPS', 'RPS', 'RWS', 'LWS',
        'flirt', 'struct', 'cores', 'out', 'ones', 'super',
        description="Estimate PVs for L/R cortical hemispheres")
    kwargs = vars(parser.parse_args())

    # Estimation
    PVs = pvestimation.cortex(**kwargs)

    # Output
    ext = '.nii.gz'
    if not kwargs.get('out'):
        namebase = op.splitext(utils._splitExts(kwargs['ref'])[0])[0]
        outdir = op.join(op.dirname(kwargs['ref']), namebase + '_cortexpvs')
    else:
        outdir = kwargs['out']

    utils._weak_mkdir(outdir)
    refSpace = ImageSpace(kwargs['ref'])
    print('Saving output at', outdir)

    p = op.join(outdir, 'stacked' + ext)
    refSpace.save_image(PVs, p)
    for i, t in enumerate(['GM', 'WM', 'nonbrain']):
        p = op.join(outdir, t + ext)
        refSpace.save_image(PVs[:, :, :, i], p)
def structure(ref, struct2ref, **kwargs):
    """
    Estimate PVs for a structure defined by a single surface.
    All arguments are kwargs.

    Required args:
        ref (str/regtricks ImageSpace): voxel grid in which to estimate PVs.
        struct2ref (str/np.array/rt.Registration): registration between space
            of surface and reference (see -flirt and -struct). Use 'I' for identity.
        surf (str): path to surface (see coords argument below)

    Optional args:
        flirt (bool): denoting struct2ref is a FLIRT transform; if so, set struct.
        coords (str): convention by which surface is defined: default is 'world'
            (mm coords); for FIRST surfaces set as 'fsl' and provide struct argument
        struct (str): path to structural image from which surfaces were derived
        cores (int): number of cores to use, default 8
        supersample (int/array): single or 3 values, supersampling factor

    Returns:
        (np.array) PV image, sized equal to reference space
    """

    # Check we either have a surface object or path to one
    if not bool(kwargs.get('surf')):
        raise RuntimeError("surf kwarg must be a Surface object or path to one")

    coords = kwargs.get('coords', 'world')
    if coords == 'fsl' and not kwargs.get('struct'):
        raise RuntimeError("Structural image must be supplied for FIRST surfs")

    if type(kwargs['surf']) is str:
        surf = Surface(kwargs['surf'], name=op.split(kwargs['surf'])[1])
        if kwargs.get('coords', 'world') == 'fsl':
            struct_spc = ImageSpace(kwargs['struct'])
            surf = surf.transform(struct_spc.FSL2world)

    elif type(kwargs['surf']) is not Surface:
        raise RuntimeError("surf kwarg must be a Surface object or path to one")

    else:
        surf = kwargs['surf']

    # Either create local copy of ImageSpace object or init from path
    if isinstance(ref, ImageSpace):
        ref_space = copy.deepcopy(ref)
    else:
        ref_space = ImageSpace(ref)

    if kwargs.get('supersample') is None:
        supersampler = np.maximum(
            np.floor(ref_space.vox_size.round(1) / 0.75), 1).astype(np.int32)
    else:
        supersampler = kwargs.get('supersample') * np.ones(3)

    pvs = estimators._structure(surf, ref_space, struct2ref, supersampler,
                                bool(kwargs.get('ones')), kwargs['cores'])

    return pvs
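# Illustrative usage sketch (not part of the module API): estimating PVs for
# a single FIRST surface, which is defined in FSL coordinates and therefore
# needs 'coords' and 'struct' as described in the docstring above. All paths
# are hypothetical placeholders.
def _example_structure_usage():
    pvs = structure(ref='func_ref.nii.gz',
                    struct2ref=np.eye(4),                  # identity registration
                    surf='T1_first-L_Thal_first.vtk',      # FIRST output surface
                    coords='fsl',                          # FIRST surfaces use FSL coords
                    struct='T1.nii.gz',                    # needed for FSL2world transform
                    cores=8)
    # pvs is a 3D array the same size as the reference grid
    return pvs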
def complete(ref, struct2ref, **kwargs):
    """
    Estimate PVs for cortex and all structures identified by FIRST within
    a reference image space. Use FAST to fill in non-surface PVs.
    All arguments are kwargs.

    Required args:
        ref (str/regtricks ImageSpace): voxel grid in which to estimate PVs.
        struct2ref (str/np.array/rt.Registration): registration between space
            of surface and reference (see -flirt and -struct). Use 'I' for identity.
        fslanat: path to fslanat directory. This REPLACES firstdir/fastdir/struct.
        firstdir (str): FIRST directory in which .vtk surfaces are located
        fastdir (str): FAST directory in which _pve_0/1/2 are located
        struct (str): path to structural image from which FIRST surfaces were derived
        fsdir (str): FreeSurfer subject directory, OR:
        LWS/LPS/RWS/RPS (str): paths to individual surfaces (L/R white/pial)

    Optional args:
        flirt (bool): denoting struct2ref is a FLIRT transform; if so, set struct.
        coords (str): convention by which surface is defined: default is 'world'
            (mm coords); for FIRST surfaces set as 'fsl' and provide struct argument
        struct (str): path to structural image from which surfaces were derived
        cores (int): number of cores to use, default 8
        supersample (int/array): single or 3 values, supersampling factor

    Returns:
        (dict) PVs associated with each individual structure and also the
            overall combined result ('stacked')
    """

    print("Estimating PVs for", ref.file_name)

    # If anat dir then various subdirs are loaded by @enforce_common_args
    # If not then direct load below
    if not bool(kwargs.get('fsdir')):
        if not all([bool(kwargs.get(k)) for k in ['LWS', 'LPS', 'RWS', 'RPS']]):
            raise RuntimeError("If fsdir not given, "
                               "provide paths for LWS, LPS, RWS, RPS")

    if not bool(kwargs.get('fslanat')):
        if not (bool(kwargs.get('fastdir')) and bool(kwargs.get('firstdir'))):
            raise RuntimeError(
                "If not using anat dir, fastdir/firstdir required")

    # Resample FASTs to reference space. Then redefine CSF as 1-(GM+WM)
    fast_paths = utils._loadFASTdir(kwargs['fastdir'])
    fast_spc = fast_paths['FAST_GM']
    fast = np.stack([
        nibabel.load(fast_paths[f'FAST_{p}']).get_fdata()
        for p in ['GM', 'WM']
    ], axis=-1)
    fasts_transformed = rt.Registration(struct2ref).apply_to_array(
        fast, fast_spc, ref)
    output = dict(FAST_GM=fasts_transformed[..., 0],
                  FAST_WM=fasts_transformed[..., 1])
    output['FAST_CSF'] = np.maximum(
        0, 1 - (output['FAST_WM'] + output['FAST_GM']))

    # Process subcortical structures first.
    FIRSTsurfs = utils._loadFIRSTdir(kwargs['firstdir'])
    subcortical = []
    struct_spc = ImageSpace(kwargs['struct'])
    for name, surf in FIRSTsurfs.items():
        s = Surface(surf, name)
        s = s.transform(struct_spc.FSL2world)
        subcortical.append(s)

    disp = "Structures found: " + ", ".join(
        [s.name for s in subcortical] + ['Cortex'])
    print(disp)

    # To estimate against each subcortical structure, we apply the following
    # partial func to each using a map() call. Carry kwargs from this func
    desc = 'Subcortical structures'
    estimator = functools.partial(__structure_wrapper, ref=ref,
                                  struct2ref=struct2ref, **kwargs)

    # This is equivalent to a map(estimator, subcortical) call
    # All the extra stuff (tqdm etc) is used for progress bar
    results = [
        pv for _, pv in tqdm.tqdm(enumerate(map(estimator, subcortical)),
                                  total=len(subcortical),
                                  desc=desc,
                                  bar_format=core.BAR_FORMAT,
                                  ascii=True)
    ]
    output.update(dict(zip([s.name for s in subcortical], results)))

    # Now do the cortex, then stack the whole lot
    ctx = cortex(ref=ref, struct2ref=struct2ref, **kwargs)
    for i, t in enumerate(['_GM', '_WM', '_nonbrain']):
        output['cortex' + t] = ctx[:, :, :, i]

    stacked = estimators.stack_images(
        {k: v for k, v in output.items() if k != 'BrStem'})
    output['GM'] = stacked[:, :, :, 0]
    output['WM'] = stacked[:, :, :, 1]
    output['nonbrain'] = stacked[:, :, :, 2]
    output['stacked'] = stacked

    return output
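# Illustrative usage sketch (not part of the module API): running the complete
# estimation with an fsl_anat directory and a FreeSurfer subject directory.
# All paths are hypothetical placeholders, and this assumes the fslanat path
# is expanded into firstdir/fastdir/struct as described in the docstring and
# the @enforce_common_args comment above.
def _example_complete_usage():
    output = complete(ref=ImageSpace('func_ref.nii.gz'),
                      struct2ref=np.eye(4),        # identity registration
                      fslanat='T1.anat',           # replaces firstdir/fastdir/struct
                      fsdir='/data/sub-01/fs',
                      cores=8)
    # output is a dict: per-structure PVs plus combined 'GM', 'WM',
    # 'nonbrain' and the 4D 'stacked' array
    return output['stacked']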
def estimate_complete():
    """
    CLI for estimating PVs for L/R cortex and subcortex
    """

    parser = CommonParser(
        'ref', 'struct2ref', 'fslanat', 'fsdir', 'firstdir', 'fastdir',
        'LPS', 'LWS', 'RPS', 'RWS', 'ones', 'super', 'cores', 'out',
        'flirt', 'struct',
        description=(
            "Estimate PVs for cortex and all structures identified "
            "by FIRST within a reference image space. Use FAST to fill in "
            "non-surface PVs"))
    kwargs = vars(parser.parse_args())

    # Unless we have been given a prepared fslanat dir, we will provide the
    # path to the next function to create one
    if type(kwargs.get('fslanat')) is str:
        if not op.isdir(kwargs.get('fslanat')):
            raise RuntimeError("fslanat dir %s does not exist"
                               % kwargs['fslanat'])
    else:
        if not all([(('fastdir' in kwargs) and ('firstdir' in kwargs)),
                    (('LPS' in kwargs) and ('RPS' in kwargs))]):
            raise RuntimeError(
                "Either separate -firstdir and -fastdir must be provided, "
                "or an -fslanat dir must be provided")

    output = pvestimation.complete(**kwargs)

    # Output paths. If given -out then use that as output, otherwise
    # save alongside reference image
    ext = '.nii.gz'
    if not kwargs.get('out'):
        namebase = op.splitext(utils._splitExts(kwargs['ref'])[0])[0]
        outdir = op.join(op.dirname(kwargs['ref']), namebase + '_surfpvs')
    else:
        outdir = kwargs['out']

    # Make output dirs if they do not exist.
    intermediatedir = op.join(outdir, 'intermediate_pvs')
    utils._weak_mkdir(outdir)
    utils._weak_mkdir(intermediatedir)

    # Load the reference image space and save the various outputs.
    # 'stacked' goes in the outdir, all others go in outdir/intermediate
    refSpace = ImageSpace(kwargs['ref'])
    print('Saving output at', outdir)
    for k, o in output.items():
        if k in ['stacked', 'GM', 'WM', 'nonbrain']:
            path = op.join(outdir, k + ext)
        else:
            path = op.join(intermediatedir, k + ext)
        refSpace.save_image(o, path)
def load(cls, path):
    """
    Load a Projector from path in HDF5 format. This is useful for performing
    repeated analyses with the same voxel grid and cortical surfaces.
    """

    f = h5py.File(path, 'r')
    p = cls.__new__(cls)

    # Recreate the reference ImageSpace first
    p.spc = ImageSpace.manual(f['ref_spc_vox2world'][()],
                              f['ref_spc_size'][()])
    if 'ref_spc_fname' in f:
        p.spc.fname = f['ref_spc_fname'][()]
    n_vox = p.spc.size.prod()

    # Now read out hemisphere specific properties
    p._hemi_pvs = []
    p.vox_tri_mats = []
    p.vtx_tri_mats = []
    p.hemi_dict = {}
    p._roi_pvs = {}

    for s in SIDES:
        hemi_key = f"{s}_hemi"
        if hemi_key in f:

            # Read out the surfaces, create the Hemisphere
            ins, outs = [
                Surface.manual(f[hemi_key][f'{s}{n}S_points'][()],
                               f[hemi_key][f'{s}{n}S_tris'][()],
                               f'{s}{n}S') for n in ['W', 'P']
            ]
            p.hemi_dict[s] = Hemisphere(ins, outs, s)

            # Read out the PVs array for the hemi
            p._hemi_pvs.append(f[hemi_key][f"{s}_pvs"][()])

            # Recreate the sparse voxtri and vtxtri matrices.
            # They are stored as a 3 x N array, where the top row
            # is row indices, the second is column indices, then data
            voxtri = f[hemi_key][f"{s}_vox_tri"][()]
            assert voxtri.shape[0] == 3, 'expected 3 rows'
            voxtri = sparse.coo_matrix(
                (voxtri[2, :], (voxtri[0, :], voxtri[1, :])),
                shape=(n_vox, ins.tris.shape[0]))
            p.vox_tri_mats.append(voxtri.tocsr())

            # Same convention as above
            vtxtri = f[hemi_key][f"{s}_vtx_tri"][()]
            assert vtxtri.shape[0] == 3, 'expected 3 rows'
            vtxtri = sparse.coo_matrix(
                (vtxtri[2, :], (vtxtri[0, :], vtxtri[1, :])),
                shape=(ins.n_points, ins.tris.shape[0]))
            p.vtx_tri_mats.append(vtxtri.tocsr())

    if "subcortical_pvs" in f:
        g = f["subcortical_pvs"]
        for k in sorted(g.keys()):
            p._roi_pvs[k] = g[k][()]

    return p
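# Illustrative usage sketch (not part of the module API): reloading a
# Projector previously saved by prepare_projector() / Projector.save(). The
# path is a hypothetical placeholder.
def _example_load_usage():
    proj = projection.Projector.load('sub-01_projector.h5')
    # The reference voxel grid, hemisphere surfaces and sparse matrices are
    # restored, so the projector can be reused without re-estimation.
    return proj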