Code example #1
def align_epi_anat(anatomy, epi_dsets, skull_strip_anat=True):
    ''' aligns epis to anatomy using ``align_epi_anat.py`` script
    
    :epi_dsets:       can be either a string or list of strings of the epi child datasets
    :skull_strip_anat:     if ``True``, ``anatomy`` will be skull-stripped using the default method
    
    The default output suffix is "_al"
    '''

    if isinstance(epi_dsets, basestring):
        epi_dsets = [epi_dsets]

    if len(epi_dsets) == 0:
        nl.notify('Warning: no epi alignment datasets given for anatomy %s!' %
                  anatomy,
                  level=nl.level.warning)
        return

    if all(os.path.exists(nl.suffix(x, '_al')) for x in epi_dsets):
        return

    anatomy_use = anatomy

    if skull_strip_anat:
        nl.skull_strip(anatomy, '_ns')
        anatomy_use = nl.suffix(anatomy, '_ns')

    inputs = [anatomy_use] + epi_dsets
    dset_products = lambda dset: [
        nl.suffix(dset, '_al'),
        nl.prefix(dset) + '_al_mat.aff12.1D',
        nl.prefix(dset) + '_tsh_vr_motion.1D'
    ]
    products = nl.flatten([dset_products(dset) for dset in epi_dsets])
    with nl.run_in_tmp(inputs, products):
        if nl.is_nifti(anatomy_use):
            anatomy_use = nl.afni_copy(anatomy_use)
        epi_dsets_use = []
        for dset in epi_dsets:
            if nl.is_nifti(dset):
                epi_dsets_use.append(nl.afni_copy(dset))
            else:
                epi_dsets_use.append(dset)
        cmd = [
            "align_epi_anat.py", "-epi2anat", "-anat_has_skull", "no",
            "-epi_strip", "3dAutomask", "-anat", anatomy_use, "-epi_base", "5",
            "-epi", epi_dsets_use[0]
        ]
        if len(epi_dsets_use) > 1:
            cmd += ['-child_epi'] + epi_dsets_use[1:]
        # run the alignment even when there is only a single EPI dataset
        out = nl.run(cmd)

        for dset in epi_dsets:
            if nl.is_nifti(dset):
                dset_nifti = nl.nifti_copy(nl.prefix(dset) + '_al+orig')
                if dset_nifti and os.path.exists(
                        dset_nifti) and dset_nifti.endswith(
                            '.nii') and dset.endswith('.gz'):
                    nl.run(['gzip', dset_nifti])
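A minimal usage sketch for align_epi_anat (hedged: the dataset names are hypothetical, and it assumes the neural package imports as nl and exposes this helper at the top level, as it does for the other functions called via nl.* above):

import neural as nl

# one anatomy plus two EPI runs (hypothetical names)
nl.align_epi_anat('anat+orig', ['run1+orig', 'run2+orig'])
# per the docstring, outputs get the '_al' suffix: run1_al+orig, run2_al+orig,
# along with the *_al_mat.aff12.1D and *_tsh_vr_motion.1D files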
Code example #2
File: afni.py  Project: alistairwalsh/neural
def skull_strip(dset, suffix='_ns', prefix=None, unifize=True):
    if prefix == None:
        prefix = nl.suffix(dset, suffix)
    nl.run([
        '3dSkullStrip', '-input', dset, '-prefix', prefix, '-niter', '400',
        '-ld', '40'
    ],
           products=nl.suffix(dset, suffix))
Code example #3
File: afni.py  Project: azraq27/neural
def skull_strip(dset,suffix='_ns',prefix=None,unifize=True):
    if prefix==None:
        prefix = nl.suffix(dset,suffix)
    nl.run([
        '3dSkullStrip',
        '-input', dset,
        '-prefix', prefix,
        '-niter', '400',
        '-ld', '40'
    ],products=nl.suffix(dset,suffix))
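A hedged usage sketch for skull_strip; the filenames are hypothetical and the import alias is assumed:

import neural as nl

nl.skull_strip('anat+orig')                       # writes anat_ns+orig (default '_ns' suffix)
nl.skull_strip('anat+orig', prefix='anat_brain')  # or name the output explicitly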
Code example #4
File: alignment.py  Project: azraq27/neural
def affine_align(dset_from,dset_to,skull_strip=True,mask=None,suffix='_aff',prefix=None,cost=None,epi=False,resample='wsinc5',grid_size=None,opts=[]):
    ''' interface to 3dAllineate to align anatomies and EPIs '''

    dset_ss = lambda dset: os.path.split(nl.suffix(dset,'_ns'))[1]
    def dset_source(dset):
        if skull_strip==True or skull_strip==dset:
            return dset_ss(dset)
        else:
            return dset

    dset_affine = prefix
    if dset_affine==None:
        dset_affine = os.path.split(nl.suffix(dset_from,suffix))[1]
    dset_affine_mat_1D = nl.prefix(dset_affine) + '_matrix.1D'
    dset_affine_par_1D = nl.prefix(dset_affine) + '_params.1D'

    if os.path.exists(dset_affine):
        # final product already exists
        return

    for dset in [dset_from,dset_to]:
        if skull_strip==True or skull_strip==dset:
            nl.skull_strip(dset,'_ns')

    mask_use = mask
    if mask:
        # the mask was probably made in the space of the original dset_to anatomy,
        # which has now been cropped by the skull stripping, so the mask
        # needs to be resampled to match the skull-stripped dset_to
        if skull_strip==True or skull_strip==dset_to:
            nl.run(['3dresample','-master',dset_ss(dset_to),'-inset',mask,'-prefix',nl.suffix(mask,'_resam')],products=nl.suffix(mask,'_resam'))
            mask_use = nl.suffix(mask,'_resam')

    all_cmd = [
        '3dAllineate',
        '-prefix', dset_affine,
        '-base', dset_source(dset_to),
        '-source', dset_source(dset_from),
        '-source_automask',
        '-1Dmatrix_save', dset_affine_mat_1D,
        '-1Dparam_save',dset_affine_par_1D,
        '-autoweight',
        '-final',resample,
        '-cmass'
    ] + opts
    if grid_size:
        all_cmd += ['-newgrid',grid_size]
    if cost:
        all_cmd += ['-cost',cost]
    if epi:
        all_cmd += ['-EPI']
    if mask:
        all_cmd += ['-emask', mask_use]

    nl.run(all_cmd,products=dset_affine)
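A hedged usage sketch for affine_align (hypothetical filenames; assumes import neural as nl):

import neural as nl

# align an EPI base image to an anatomy, skull-stripping only the anatomy
nl.affine_align('epi_base+orig', 'anat+orig', skull_strip='anat+orig',
                cost='lpc', epi=True)
# products: epi_base_aff+orig plus epi_base_aff_matrix.1D and epi_base_aff_params.1D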
Code example #5
File: alignment.py  Project: azraq27/neural
def volreg(dset,suffix='_volreg',base=3,tshift=3,dfile_suffix='_volreg.1D'):
    '''simple interface to 3dvolreg

        :suffix:        suffix to add to ``dset`` for volreg'ed file
        :base:          either a number or ``dset[#]`` of the base image to register to
        :tshift:        if a number, then tshift ignoring that many images, if ``None``
                        then don't tshift
        :dfile_suffix:  suffix to add to ``dset`` to save the motion parameters to
    '''
    cmd = ['3dvolreg','-prefix',nl.suffix(dset,suffix),'-base',base,'-dfile',nl.prefix(dset)+dfile_suffix]
    if tshift:
        cmd += ['-tshift',tshift]
    cmd += [dset]
    nl.run(cmd,products=nl.suffix(dset,suffix))
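A hedged usage sketch for volreg (hypothetical filename; assumes import neural as nl):

import neural as nl

# register every volume of run1+orig to sub-brick 0, tshifting and ignoring the first 3 images
nl.volreg('run1+orig', base=0, tshift=3)
# writes run1_volreg+orig and the motion parameters in run1_volreg.1D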
Code example #6
def qwarp_epi(dset, align_subbrick=5, suffix='_qwal', prefix=None):
    '''aligns an EPI time-series using 3dQwarp
    
    Very expensive and not efficient at all, but it can produce pretty impressive alignment for EPI time-series with significant
    distortions due to motion'''
    info = nl.dset_info(dset)
    if info == None:
        nl.notify('Error reading dataset "%s"' % (dset), level=nl.level.error)
        return False
    if prefix == None:
        prefix = nl.suffix(dset, suffix)
    dset_sub = lambda x: '_tmp_qwarp_epi-%s_%d.nii.gz' % (nl.prefix(dset), x)
    try:
        align_dset = nl.suffix(dset_sub(align_subbrick), '_warp')
        nl.calc('%s[%d]' % (dset, align_subbrick),
                expr='a',
                prefix=align_dset,
                datum='float')
        for i in xrange(info.reps):
            if i != align_subbrick:
                nl.calc('%s[%d]' % (dset, i),
                        expr='a',
                        prefix=dset_sub(i),
                        datum='float')
                nl.run([
                    '3dQwarp', '-nowarp', '-workhard', '-superhard',
                    '-minpatch', '9', '-blur', '0', '-pear', '-nopenalty',
                    '-base', align_dset, '-source',
                    dset_sub(i), '-prefix',
                    nl.suffix(dset_sub(i), '_warp')
                ],
                       quiet=True)
        cmd = ['3dTcat', '-prefix', prefix]
        if info.TR:
            cmd += ['-tr', info.TR]
        if info.slice_timing:
            cmd += ['-tpattern', info.slice_timing]
        cmd += [nl.suffix(dset_sub(i), '_warp') for i in xrange(info.reps)]
        nl.run(cmd, quiet=True)
    except Exception as e:
        raise e
    finally:
        for i in xrange(info.reps):
            # the per-volume warped copies were written with the '_warp' suffix
            for tmp_suffix in ['', '_warp']:
                try:
                    os.remove(nl.suffix(dset_sub(i), tmp_suffix))
                except:
                    pass
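A hedged usage sketch for qwarp_epi (hypothetical filename; assumes import neural as nl):

import neural as nl

# non-linearly register each volume of an EPI run to sub-brick 5; slow (one 3dQwarp per volume)
nl.qwarp_epi('run1+orig')
# result: run1_qwal+orig (the default '_qwal' suffix)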
Code example #7
def cluster(dset, min_distance, min_cluster_size, prefix=None):
    '''clusters given ``dset`` connecting voxels ``min_distance``mm away with minimum cluster size of ``min_cluster_size``
    default prefix is ``dset`` suffixed with ``_clust%d``'''
    if prefix == None:
        prefix = nl.suffix(dset, '_clust%d' % min_cluster_size)
    return available_method('cluster')(dset, min_distance, min_cluster_size,
                                       prefix)
Code example #8
File: afni.py  Project: azraq27/neural
def roi_stats(mask,dset):
    out_dict = {}
    values = [{'Med': 'median', 'Min': 'min', 'Max': 'max',
               'NZMean': 'nzmean', 'NZSum': 'nzsum', 'NZSigma': 'nzsigma',
               'Mean': 'mean', 'Sigma': 'sigma', 'Mod': 'mode','NZcount':'nzvoxels'},
              {'NZMod': 'nzmode', 'NZMed': 'nzmedian', 'NZMax': 'nzmax', 'NZMin': 'nzmin','Mean':'mean'}]
    options = [['-nzmean','-nzsum','-nzvoxels','-minmax','-sigma','-nzsigma','-median','-mode'],
               ['-nzminmax','-nzmedian','-nzmode']]
    if not nl.dset_grids_equal((mask,dset)):
        i = nl.dset_info(dset)
        grid_hash = '_' + '_'.join([str(x) for x in (i.voxel_size + i.voxel_dims)])
        new_mask = nl.suffix(mask,grid_hash)
        if not os.path.exists(new_mask):
            nl.run(["3dfractionize","-template",dset,"-input",nl.calc(mask,"a",datum="short"),"-prefix",new_mask,"-preserve","-clip","0.2"])
        mask = new_mask
    for i in xrange(len(values)):
        cmd = ['3dROIstats','-1Dformat','-nobriklab','-mask',mask] + options[i] + [dset]
        out = subprocess.check_output(cmd).split('\n')
        header = [(values[i][x.split('_')[0]],int(x.split('_')[1])) for x in out[1].split()[1:]]
        for j in xrange(len(out)/2-1):
            stats = [float(x) for x in out[(j+1)*2+1][1:].split()]
            for s in xrange(len(stats)):
                roi = header[s][1]
                stat_name = header[s][0]
                stat = stats[s]
                if roi not in out_dict:
                    out_dict[roi] = {}
                out_dict[roi][stat_name] = stat
    return out_dict
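A hedged usage sketch for roi_stats showing the shape of the returned value (hypothetical filenames; assumes import neural as nl):

import neural as nl

stats = nl.roi_stats('rois+orig', 'betas+orig')
# 'stats' is a dict keyed by integer ROI label, each value a dict of statistics,
# e.g. stats[3]['nzmean'] or stats[3]['median']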
Code example #9
File: alignment.py  Project: azraq27/neural
def align_epi(anatomy,epis,suffix='_al',base=3,skull_strip=True):
    '''[[currently in progress]]: a simple replacement for the ``align_epi_anat.py`` script, because I've found it to be unreliable, in my usage'''
    for epi in epis:
        nl.tshift(epi,suffix='_tshift')
        nl.affine_align(nl.suffix(epi,'_tshift'),'%s[%d]'%(epis[0],base),skull_strip=False,epi=True,cost='crM',resample='wsinc5',grid_size=nl.dset_info(epi).voxel_size[0],suffix='_al')
    ss = [anatomy] if skull_strip else False
    nl.affine_align(anatomy,'%s[%d]'%(epis[0],base),skull_strip=ss,cost='lpa',grid_size=1,opts=['-interp','cubic'],suffix='_al-to-EPI')
Code example #10
File: alignment.py  Project: azraq27/neural
def align_epi_anat(anatomy,epi_dsets,skull_strip_anat=True):
    ''' aligns epis to anatomy using ``align_epi_anat.py`` script

    :epi_dsets:       can be either a string or list of strings of the epi child datasets
    :skull_strip_anat:     if ``True``, ``anatomy`` will be skull-stripped using the default method

    The default output suffix is "_al"
    '''

    if isinstance(epi_dsets,basestring):
        epi_dsets = [epi_dsets]

    if len(epi_dsets)==0:
        nl.notify('Warning: no epi alignment datasets given for anatomy %s!' % anatomy,level=nl.level.warning)
        return

    if all(os.path.exists(nl.suffix(x,'_al')) for x in epi_dsets):
        return

    anatomy_use = anatomy

    if skull_strip_anat:
        nl.skull_strip(anatomy,'_ns')
        anatomy_use = nl.suffix(anatomy,'_ns')

    inputs = [anatomy_use] + epi_dsets
    dset_products = lambda dset: [nl.suffix(dset,'_al'), nl.prefix(dset)+'_al_mat.aff12.1D', nl.prefix(dset)+'_tsh_vr_motion.1D']
    products = nl.flatten([dset_products(dset) for dset in epi_dsets])
    with nl.run_in_tmp(inputs,products):
        if nl.is_nifti(anatomy_use):
            anatomy_use = nl.afni_copy(anatomy_use)
        epi_dsets_use = []
        for dset in epi_dsets:
            if nl.is_nifti(dset):
                epi_dsets_use.append(nl.afni_copy(dset))
            else:
                epi_dsets_use.append(dset)
        cmd = ["align_epi_anat.py", "-epi2anat", "-anat_has_skull", "no", "-epi_strip", "3dAutomask","-anat", anatomy_use, "-epi_base", "5", "-epi", epi_dsets_use[0]]
        if len(epi_dsets_use)>1:
            cmd += ['-child_epi'] + epi_dsets_use[1:]
        # run the alignment even when there is only a single EPI dataset
        out = nl.run(cmd)

        for dset in epi_dsets:
            if nl.is_nifti(dset):
                dset_nifti = nl.nifti_copy(nl.prefix(dset)+'_al+orig')
                if dset_nifti and os.path.exists(dset_nifti) and dset_nifti.endswith('.nii') and dset.endswith('.gz'):
                    nl.run(['gzip',dset_nifti])
Code example #11
def qwarp_apply(dset_from,
                dset_warp,
                affine=None,
                warp_suffix='_warp',
                master='WARP',
                interp=None,
                prefix=None):
    '''applies the transform from a previous qwarp
    
    Uses the warp parameters from the dataset listed in 
    ``dset_warp`` (usually the dataset name ends in ``_WARP``) 
    to the dataset ``dset_from``. If a ``.1D`` file is given
    in the ``affine`` parameter, it will be applied simultaneously
    with the qwarp.
    
    If the parameter ``interp`` is given, it will be used as the interpolation method,
    otherwise the default is used (currently wsinc5)
    
    The output dataset will have the ``warp_suffix`` suffix added to its name
    '''
    out_dset = prefix
    if out_dset == None:
        out_dset = os.path.split(nl.suffix(dset_from, warp_suffix))[1]
    dset_from_info = nl.dset_info(dset_from)
    dset_warp_info = nl.dset_info(dset_warp)
    if (dset_from_info.orient != dset_warp_info.orient):
        # If the datasets are different orientations, the transform won't be applied correctly
        nl.run([
            '3dresample', '-orient', dset_warp_info.orient, '-prefix',
            nl.suffix(dset_from, '_reorient'), '-inset', dset_from
        ],
               products=nl.suffix(dset_from, '_reorient'))
        dset_from = nl.suffix(dset_from, '_reorient')
    warp_opt = str(dset_warp)
    if affine:
        warp_opt += ' ' + affine
    cmd = ['3dNwarpApply', '-nwarp', warp_opt]
    cmd += ['-source', dset_from, '-master', master, '-prefix', out_dset]

    if interp:
        cmd += ['-interp', interp]

    nl.run(cmd, products=out_dset)
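A hedged usage sketch for qwarp_apply (hypothetical filenames; assumes import neural as nl):

import neural as nl

# apply a previously computed warp, combined with an affine .1D, to another dataset
nl.qwarp_apply('epi_mean+orig', 'anat_qwarp_WARP+orig', affine='epi_aff_matrix.1D')
# output: epi_mean_warp+orig (default '_warp' suffix), on the grid of the warp (master='WARP')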
Code example #12
File: alignment.py  Project: azraq27/neural
def qwarp_epi(dset,align_subbrick=5,suffix='_qwal',prefix=None):
    '''aligns an EPI time-series using 3dQwarp

    Very expensive and not efficient at all, but it can produce pretty impressive alignment for EPI time-series with significant
    distortions due to motion'''
    info = nl.dset_info(dset)
    if info==None:
        nl.notify('Error reading dataset "%s"' % (dset),level=nl.level.error)
        return False
    if prefix==None:
        prefix = nl.suffix(dset,suffix)
    dset_sub = lambda x: '_tmp_qwarp_epi-%s_%d.nii.gz' % (nl.prefix(dset),x)
    try:
        align_dset = nl.suffix(dset_sub(align_subbrick),'_warp')
        nl.calc('%s[%d]' % (dset,align_subbrick),expr='a',prefix=align_dset,datum='float')
        for i in xrange(info.reps):
            if i != align_subbrick:
                nl.calc('%s[%d]' % (dset,i),expr='a',prefix=dset_sub(i),datum='float')
                nl.run([
                    '3dQwarp', '-nowarp',
                    '-workhard', '-superhard', '-minpatch', '9', '-blur', '0',
                    '-pear', '-nopenalty',
                    '-base', align_dset,
                    '-source', dset_sub(i),
                    '-prefix', nl.suffix(dset_sub(i),'_warp')
                ],quiet=True)
        cmd = ['3dTcat','-prefix',prefix]
        if info.TR:
            cmd += ['-tr',info.TR]
        if info.slice_timing:
            cmd += ['-tpattern',info.slice_timing]
        cmd += [nl.suffix(dset_sub(i),'_warp') for i in xrange(info.reps)]
        nl.run(cmd,quiet=True)
    except Exception as e:
        raise e
    finally:
        for i in xrange(info.reps):
            # the per-volume warped copies were written with the '_warp' suffix
            for tmp_suffix in ['','_warp']:
                try:
                    os.remove(nl.suffix(dset_sub(i),tmp_suffix))
                except:
                    pass
Code example #13
File: qc.py  Project: alistairwalsh/neural
def temporal_snr(signal_dset,
                 noise_dset,
                 mask=None,
                 prefix='temporal_snr.nii.gz'):
    '''Calculates temporal SNR by dividing average signal of ``signal_dset`` by SD of ``noise_dset``.
    ``signal_dset`` should be a dataset that contains the average signal value (i.e., nothing that has
    been detrended by removing the mean), and ``noise_dset`` should be a dataset that has all possible
    known signal fluctuations (e.g., task-related effects) removed from it (the residual dataset from a 
    deconvolve works well)'''
    for d in [('mean', signal_dset), ('stdev', noise_dset)]:
        new_d = nl.suffix(d[1], '_%s' % d[0])
        cmd = ['3dTstat', '-%s' % d[0], '-prefix', new_d]
        if mask:
            cmd += ['-mask', mask]
        cmd += [d[1]]
        nl.run(cmd, products=new_d)
    nl.calc([nl.suffix(signal_dset, '_mean'),
             nl.suffix(noise_dset, '_stdev')],
            'a/b',
            prefix=prefix)
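A hedged usage sketch for temporal_snr (hypothetical filenames; assumes import neural as nl):

import neural as nl

# mean of the raw run divided by the SD of the deconvolution residuals, within a mask
nl.temporal_snr('run1+orig', 'residual+orig', mask='brain_mask+orig',
                prefix='run1_tsnr.nii.gz')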
Code example #14
File: alignment.py  Project: azraq27/neural
def skullstrip_template(dset,template,prefix=None,suffix=None,dilate=0):
    '''Takes the raw anatomy ``dset``, aligns it to a template brain, and applies a templated skullstrip. Should produce fairly reliable skullstrips as long
    as there is a decent amount of normal brain and the overall shape of the brain is normal-ish'''
    if suffix==None:
        suffix = '_sstemplate'
    if prefix==None:
        prefix = nl.suffix(dset,suffix)
    if not os.path.exists(prefix):
        with nl.notify('Running template-based skull-strip on %s' % dset):
            dset = os.path.abspath(dset)
            template = os.path.abspath(template)
            tmp_dir = tempfile.mkdtemp()
            cwd = os.getcwd()
            with nl.run_in(tmp_dir):
                nl.affine_align(template,dset,skull_strip=None,cost='mi',opts=['-nmatch','100%'])
                nl.run(['3dQwarp','-minpatch','20','-penfac','10','-noweight','-source',nl.suffix(template,'_aff'),'-base',dset,'-prefix',nl.suffix(template,'_qwarp')],products=nl.suffix(template,'_qwarp'))
                info = nl.dset_info(nl.suffix(template,'_qwarp'))
                max_value = info.subbricks[0]['max']
                nl.calc([dset,nl.suffix(template,'_qwarp')],'a*step(b-%f*0.05)'%max_value,prefix)
                shutil.move(prefix,cwd)
            shutil.rmtree(tmp_dir)
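A hedged usage sketch for skullstrip_template (hypothetical filenames; assumes import neural as nl and that the helper is exposed at the package level):

import neural as nl

# strip a raw anatomy by warping a skull-stripped template onto it
nl.skullstrip_template('anat.nii.gz', 'template_brain.nii.gz',
                       prefix='anat_sstemplate.nii.gz')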
Code example #15
File: alignment.py  Project: azraq27/neural
def affine_apply(dset_from,affine_1D,master,affine_suffix='_aff',interp='NN',inverse=False,prefix=None):
    '''apply the 1D file from a previously aligned dataset
    Applies the matrix in ``affine_1D`` to ``dset_from`` and makes the final grid look like the dataset ``master``
    using the interpolation method ``interp``. If ``inverse`` is True, will apply the inverse of ``affine_1D`` instead'''
    affine_1D_use = affine_1D
    if inverse:
        with tempfile.NamedTemporaryFile(delete=False) as temp:
            temp.write(subprocess.check_output(['cat_matvec',affine_1D,'-I']))
            affine_1D_use = temp.name
    if prefix==None:
        prefix = nl.suffix(dset_from,affine_suffix)
    nl.run(['3dAllineate','-1Dmatrix_apply',affine_1D_use,'-input',dset_from,'-prefix',prefix,'-master',master,'-final',interp],products=prefix)
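A hedged usage sketch for affine_apply (hypothetical filenames; assumes import neural as nl):

import neural as nl

# re-apply a saved affine matrix to a mask, putting it on the grid of another dataset
nl.affine_apply('lesion_mask+orig', 'anat_aff_matrix.1D', master='epi_base+orig', interp='NN')
# pass inverse=True to apply the inverse transform instead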
Code example #16
def skullstrip_template(dset, template, prefix=None, suffix=None, dilate=0):
    '''Takes the raw anatomy ``dset``, aligns it to a template brain, and applies a templated skullstrip. Should produce fairly reliable skullstrips as long
    as there is a decent amount of normal brain and the overall shape of the brain is normal-ish'''
    if suffix == None:
        suffix = '_sstemplate'
    if prefix == None:
        prefix = nl.suffix(dset, suffix)
    if not os.path.exists(prefix):
        with nl.notify('Running template-based skull-strip on %s' % dset):
            dset = os.path.abspath(dset)
            template = os.path.abspath(template)
            tmp_dir = tempfile.mkdtemp()
            cwd = os.getcwd()
            with nl.run_in(tmp_dir):
                nl.affine_align(template,
                                dset,
                                skull_strip=None,
                                cost='mi',
                                opts=['-nmatch', '100%'])
                nl.run([
                    '3dQwarp', '-minpatch', '20', '-penfac', '10', '-noweight',
                    '-source',
                    nl.suffix(template, '_aff'), '-base', dset, '-prefix',
                    nl.suffix(template, '_qwarp')
                ],
                       products=nl.suffix(template, '_qwarp'))
                info = nl.dset_info(nl.suffix(template, '_qwarp'))
                max_value = info.subbricks[0]['max']
                nl.calc([dset, nl.suffix(template, '_qwarp')],
                        'a*step(b-%f*0.05)' % max_value, prefix)
                shutil.move(prefix, cwd)
            shutil.rmtree(tmp_dir)
Code example #17
File: afni.py  Project: alistairwalsh/neural
def roi_stats(mask, dset):
    out_dict = {}
    values = [{
        'Med': 'median',
        'Min': 'min',
        'Max': 'max',
        'NZMean': 'nzmean',
        'NZSum': 'nzsum',
        'NZSigma': 'nzsigma',
        'Mean': 'mean',
        'Sigma': 'sigma',
        'Mod': 'mode',
        'NZcount': 'nzvoxels'
    }, {
        'NZMod': 'nzmode',
        'NZMed': 'nzmedian',
        'NZMax': 'nzmax',
        'NZMin': 'nzmin',
        'Mean': 'mean'
    }]
    options = [[
        '-nzmean', '-nzsum', '-nzvoxels', '-minmax', '-sigma', '-nzsigma',
        '-median', '-mode'
    ], ['-nzminmax', '-nzmedian', '-nzmode']]
    if not nl.dset_grids_equal((mask, dset)):
        i = nl.dset_info(dset)
        grid_hash = '_' + '_'.join(
            [str(x) for x in (i.voxel_size + i.voxel_dims)])
        new_mask = nl.suffix(mask, grid_hash)
        if not os.path.exists(new_mask):
            nl.run([
                "3dfractionize", "-template", dset, "-input",
                nl.calc(mask, "a", datum="short"), "-prefix", new_mask,
                "-preserve", "-clip", "0.2"
            ])
        mask = new_mask
    for i in xrange(len(values)):
        cmd = ['3dROIstats', '-1Dformat', '-nobriklab', '-mask', mask
               ] + options[i] + [dset]
        out = subprocess.check_output(cmd).split('\n')
        header = [(values[i][x.split('_')[0]], int(x.split('_')[1]))
                  for x in out[1].split()[1:]]
        for j in xrange(len(out) / 2 - 1):
            stats = [float(x) for x in out[(j + 1) * 2 + 1][1:].split()]
            for s in xrange(len(stats)):
                roi = header[s][1]
                stat_name = header[s][0]
                stat = stats[s]
                if roi not in out_dict:
                    out_dict[roi] = {}
                out_dict[roi][stat_name] = stat
    return out_dict
Code example #18
File: dsets.py  Project: azraq27/neural
def resample_dset(dset,template,prefix=None,resam='NN'):
    '''Resamples ``dset`` to the grid of ``template`` using resampling mode ``resam``.
    Default prefix is to suffix ``_resam`` at the end of ``dset``

    Available resampling modes:
        :NN:    Nearest Neighbor
        :Li:    Linear
        :Cu:    Cubic
        :Bk:    Blocky
    '''
    if prefix==None:
        prefix = nl.suffix(dset,'_resam')
    nl.run(['3dresample','-master',template,'-rmode',resam,'-prefix',prefix,'-inset',dset])
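A hedged usage sketch for resample_dset (hypothetical filenames; assumes import neural as nl):

import neural as nl

# nearest-neighbor resample of a mask onto the grid of an EPI dataset
nl.resample_dset('mask+orig', 'epi_base+orig')
# writes mask_resam+orig; pass resam='Li', 'Cu' or 'Bk' for other interpolation modes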
Code example #19
def volreg(dset,
           suffix='_volreg',
           base=3,
           tshift=3,
           dfile_suffix='_volreg.1D'):
    '''simple interface to 3dvolreg
    
        :suffix:        suffix to add to ``dset`` for volreg'ed file
        :base:          either a number or ``dset[#]`` of the base image to register to
        :tshift:        if a number, then tshift ignoring that many images, if ``None``
                        then don't tshift
        :dfile_suffix:  suffix to add to ``dset`` to save the motion parameters to
    '''
    cmd = [
        '3dvolreg', '-prefix',
        nl.suffix(dset, suffix), '-base', base, '-dfile',
        nl.prefix(dset) + dfile_suffix
    ]
    if tshift:
        cmd += ['-tshift', tshift]
    cmd += [dset]
    nl.run(cmd, products=nl.suffix(dset, suffix))
Code example #20
File: alignment.py  Project: azraq27/neural
def qwarp_apply(dset_from,dset_warp,affine=None,warp_suffix='_warp',master='WARP',interp=None,prefix=None):
    '''applies the transform from a previous qwarp

    Uses the warp parameters from the dataset listed in
    ``dset_warp`` (usually the dataset name ends in ``_WARP``)
    to the dataset ``dset_from``. If a ``.1D`` file is given
    in the ``affine`` parameter, it will be applied simultaneously
    with the qwarp.

    If the parameter ``interp`` is given, it will be used as the interpolation method,
    otherwise the default is used (currently wsinc5)

    The output dataset will have the ``warp_suffix`` suffix added to its name
    '''
    out_dset = prefix
    if out_dset==None:
        out_dset = os.path.split(nl.suffix(dset_from,warp_suffix))[1]
    dset_from_info = nl.dset_info(dset_from)
    dset_warp_info = nl.dset_info(dset_warp)
    if(dset_from_info.orient!=dset_warp_info.orient):
        # If the datasets are different orientations, the transform won't be applied correctly
        nl.run(['3dresample','-orient',dset_warp_info.orient,'-prefix',nl.suffix(dset_from,'_reorient'),'-inset',dset_from],products=nl.suffix(dset_from,'_reorient'))
        dset_from = nl.suffix(dset_from,'_reorient')
    warp_opt = str(dset_warp)
    if affine:
        warp_opt += ' ' + affine
    cmd = [
        '3dNwarpApply',
        '-nwarp', warp_opt]
    cmd += [
        '-source', dset_from,
        '-master',master,
        '-prefix', out_dset
    ]

    if interp:
        cmd += ['-interp',interp]

    nl.run(cmd,products=out_dset)
Code example #21
def affine_apply(dset_from,
                 affine_1D,
                 master,
                 affine_suffix='_aff',
                 interp='NN',
                 inverse=False,
                 prefix=None):
    '''apply the 1D file from a previously aligned dataset
    Applies the matrix in ``affine_1D`` to ``dset_from`` and makes the final grid look like the dataset ``master``
    using the interpolation method ``interp``. If ``inverse`` is True, will apply the inverse of ``affine_1D`` instead'''
    affine_1D_use = affine_1D
    if inverse:
        with tempfile.NamedTemporaryFile(delete=False) as temp:
            temp.write(subprocess.check_output(['cat_matvec', affine_1D,
                                                '-I']))
            affine_1D_use = temp.name
    if prefix == None:
        prefix = nl.suffix(dset_from, affine_suffix)
    nl.run([
        '3dAllineate', '-1Dmatrix_apply', affine_1D_use, '-input', dset_from,
        '-prefix', prefix, '-master', master, '-final', interp
    ],
           products=prefix)
Code example #22
def skull_strip(dset, suffix='_ns', prefix=None, unifize=True):
    ''' use bet to strip skull from given anatomy '''
    # should add options to use betsurf and T1/T2 in the future
    # Since BET fails on weirdly distributed datasets, I added 3dUnifize in... I realize this makes this dependent on AFNI. Sorry, :)
    if prefix == None:
        prefix = nl.suffix(dset, suffix)
    unifize_dset = nl.suffix(dset, '_u')
    cmd = bet2 if bet2 else 'bet2'
    if unifize:
        info = nl.dset_info(dset)
        if info == None:
            nl.notify('Error: could not read info for dset %s' % dset,
                      level=nl.level.error)
            return False
        cmd = os.path.join(fsl_dir, cmd) if fsl_dir else cmd
        cutoff_value = nl.max(dset) * 0.05
        nl.run([
            '3dUnifize', '-prefix', unifize_dset,
            nl.calc(dset, 'step(a-%f)*a' % cutoff_value)
        ],
               products=unifize_dset)
    else:
        unifize_dset = dset
    nl.run([cmd, unifize_dset, prefix, '-w', 0.5], products=prefix)
Code example #23
File: dsets.py  Project: alistairwalsh/neural
def resample_dset(dset, template, prefix=None, resam='NN'):
    '''Resamples ``dset`` to the grid of ``template`` using resampling mode ``resam``.
    Default prefix is to suffix ``_resam`` at the end of ``dset``

    Available resampling modes:
        :NN:    Nearest Neighbor
        :Li:    Linear
        :Cu:    Cubic
        :Bk:    Blocky
    '''
    if prefix == None:
        prefix = nl.suffix(dset, '_resam')
    nl.run([
        '3dresample', '-master', template, '-rmode', resam, '-prefix', prefix,
        '-inset', dset
    ])
Code example #24
File: qc.py  Project: alistairwalsh/neural
def inside_brain(stat_dset, atlas=None, p=0.001):
    '''calculates the percentage of voxels above a statistical threshold inside a brain mask vs. outside it
    
    if ``atlas`` is ``None``, it will try to find ``TT_N27``'''
    atlas = find_atlas(atlas)
    if atlas == None:
        return None
    mask_dset = nl.suffix(stat_dset, '_atlasfrac')
    nl.run([
        '3dfractionize', '-template',
        nl.strip_subbrick(stat_dset), '-input',
        nl.calc([atlas], '1+step(a-100)', datum='short'), '-preserve', '-clip',
        '0.2', '-prefix', mask_dset
    ],
           products=mask_dset,
           quiet=True,
           stderr=None)
    s = nl.roi_stats(mask_dset, nl.thresh(stat_dset, p))
    return 100.0 * s[2]['nzvoxels'] / (s[1]['nzvoxels'] + s[2]['nzvoxels'])
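A hedged usage sketch for inside_brain (hypothetical filename; assumes import neural as nl and that the helper is exposed at the package level):

import neural as nl

# percentage of voxels above the p<0.001 threshold that fall inside the default (TT_N27) brain mask
pct = nl.inside_brain('stats_tstat+orig', p=0.001)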
Code example #25
def align_epi(anatomy, epis, suffix='_al', base=3, skull_strip=True):
    '''[[currently in progress]]: a simple replacement for the ``align_epi_anat.py`` script, because I've found it to be unreliable, in my usage'''
    for epi in epis:
        nl.tshift(epi, suffix='_tshift')
        nl.affine_align(nl.suffix(epi, '_tshift'),
                        '%s[%d]' % (epis[0], base),
                        skull_strip=False,
                        epi=True,
                        cost='crM',
                        resample='wsinc5',
                        grid_size=nl.dset_info(epi).voxel_size[0],
                        suffix='_al')
    ss = [anatomy] if skull_strip else False
    nl.affine_align(anatomy,
                    '%s[%d]' % (epis[0], base),
                    skull_strip=ss,
                    cost='hel',
                    grid_size=1,
                    opts=['-interp', 'cubic'],
                    suffix='_al-to-EPI')
Code example #26
File: decon.py  Project: alistairwalsh/neural
def smooth_decon_to_fwhm(decon,fwhm,cache=True):
    '''takes an input :class:`Decon` object and uses ``3dBlurToFWHM`` to make the output as close as possible to ``fwhm``
    returns the final measured fwhm. If ``cache`` is ``True``, will save the blurred input file (and use it again in the future)'''
    if os.path.exists(decon.prefix):
        return
    blur_dset = lambda dset: nl.suffix(dset,'_smooth_to_%.2f' % fwhm)

    with nl.notify('Running smooth_decon_to_fwhm analysis (with %.2fmm blur)' % fwhm):
        tmpdir = tempfile.mkdtemp()
        try:
            cwd = os.getcwd()
            random_files = [re.sub(r'\[\d+\]$','',str(x)) for x in nl.flatten([x for x in decon.__dict__.values() if isinstance(x,basestring) or isinstance(x,list)]+[x.values() for x in decon.__dict__.values() if isinstance(x,dict)])]
            files_to_copy = [x for x in random_files if os.path.exists(x) and x[0]!='/']
            files_to_copy += [blur_dset(dset) for dset in decon.input_dsets if os.path.exists(blur_dset(dset))]
            # copy the input and supporting files into the temp directory
            for file in files_to_copy:
                try:
                    shutil.copytree(file,tmpdir)
                except OSError as e:
                    shutil.copy(file,tmpdir)

            copyback_files = [decon.prefix,decon.errts]
            with nl.run_in(tmpdir):
                if os.path.exists(decon.prefix):
                    os.remove(decon.prefix)

                # Create the blurred inputs (or load from cache)
                if cache and all([os.path.exists(os.path.join(cwd,blur_dset(dset))) for dset in decon.input_dsets]):
                    # Everything is already cached...
                    nl.notify('Using cache\'d blurred datasets')
                else:
                    # Need to make them from scratch
                    with nl.notify('Creating blurred datasets'):
                        old_errts = decon.errts
                        decon.errts = 'residual.nii.gz'
                        decon.prefix = os.path.basename(decon.prefix)
                        # Run once in place to get the residual dataset
                        decon.run()
                        running_reps = 0
                        for dset in decon.input_dsets:
                            info = nl.dset_info(dset)
                            residual_dset = nl.suffix(dset,'_residual')
                            nl.run(['3dbucket','-prefix',residual_dset,'%s[%d..%d]'%(decon.errts,running_reps,running_reps+info.reps-1)],products=residual_dset)
                            cmd = ['3dBlurToFWHM','-quiet','-input',dset,'-blurmaster',residual_dset,'-prefix',blur_dset(dset),'-FWHM',fwhm]
                            if decon.mask:
                                if decon.mask=='auto':
                                    cmd += ['-automask']
                                else:
                                    cmd += ['-mask',decon.mask]
                            nl.run(cmd,products=blur_dset(dset))
                            running_reps += info.reps
                            if cache:
                                copyback_files.append(blur_dset(dset))
                    decon.errts = old_errts
                decon.input_dsets = [blur_dset(dset) for dset in decon.input_dsets]
                for d in [decon.prefix,decon.errts]:
                    if os.path.exists(d):
                        try:
                            os.remove(d)
                        except:
                            pass
                decon.run()
                for copyfile in copyback_files:
                    if os.path.exists(copyfile):
                        shutil.copy(copyfile,cwd)
                    else:
                        nl.notify('Warning: deconvolve did not produce expected file %s' % decon.prefix,level=nl.level.warning)
        except:
            raise
        finally:
            shutil.rmtree(tmpdir,True)
Code example #27
def affine_align(dset_from,
                 dset_to,
                 skull_strip=True,
                 mask=None,
                 suffix='_aff',
                 prefix=None,
                 cost=None,
                 epi=False,
                 resample='wsinc5',
                 grid_size=None,
                 opts=[]):
    ''' interface to 3dAllineate to align anatomies and EPIs '''

    dset_ss = lambda dset: os.path.split(nl.suffix(dset, '_ns'))[1]

    def dset_source(dset):
        if skull_strip == True or skull_strip == dset:
            return dset_ss(dset)
        else:
            return dset

    dset_affine = prefix
    if dset_affine == None:
        dset_affine = os.path.split(nl.suffix(dset_from, suffix))[1]
    dset_affine_mat_1D = nl.prefix(dset_affine) + '_matrix.1D'
    dset_affine_par_1D = nl.prefix(dset_affine) + '_params.1D'

    if os.path.exists(dset_affine):
        # final product already exists
        return

    for dset in [dset_from, dset_to]:
        if skull_strip == True or skull_strip == dset:
            nl.skull_strip(dset, '_ns')

    mask_use = mask
    if mask:
        # the mask was probably made in the space of the original dset_to anatomy,
        # which has now been cropped by the skull stripping, so the mask
        # needs to be resampled to match the skull-stripped dset_to
        if skull_strip == True or skull_strip == dset_to:
            nl.run([
                '3dresample', '-master',
                dset_ss(dset_to), '-inset', mask, '-prefix',
                nl.suffix(mask, '_resam')
            ],
                   products=nl.suffix(mask, '_resam'))
            mask_use = nl.suffix(mask, '_resam')

    all_cmd = [
        '3dAllineate', '-prefix', dset_affine, '-base',
        dset_source(dset_to), '-source',
        dset_source(dset_from), '-1Dmatrix_save', dset_affine_mat_1D,
        '-1Dparam_save', dset_affine_par_1D, '-autoweight', '-final', resample,
        '-cmass'
    ] + opts
    if grid_size:
        all_cmd += ['-newgrid', grid_size]
    if cost:
        all_cmd += ['-cost', cost]
    if epi:
        all_cmd += ['-EPI']
    if mask:
        all_cmd += ['-emask', mask_use]

    nl.run(all_cmd, products=dset_affine)
Code example #28
File: alignment.py  Project: azraq27/neural
def qwarp_align(dset_from,dset_to,skull_strip=True,mask=None,affine_suffix='_aff',suffix='_qwarp',prefix=None):
    '''aligns ``dset_from`` to ``dset_to`` using 3dQwarp

    Will run ``3dSkullStrip`` (unless ``skull_strip`` is ``False``), ``3dUnifize``,
    ``3dAllineate``, and then ``3dQwarp``. This method will add suffixes to the input
    dataset for the intermediate files (e.g., ``_ss``, ``_u``). If those files already
    exist, it will assume they were intelligently named, and use them as is

    :skull_strip:       If True/False, turns skull-stripping of both datasets on/off.
                        If a string matching ``dset_from`` or ``dset_to``, will only
                        skull-strip the given dataset
    :mask:              Applies the given mask to the alignment. Because of the nature
                        of the alignment algorithms, the mask is **always** applied to
                        the ``dset_to``. If this isn't what you want, you need to reverse
                        the transform and re-apply it (e.g., using :meth:`qwarp_invert`
                        and :meth:`qwarp_apply`). If the ``dset_to`` dataset is skull-stripped,
                        the mask will also be resampled to match the ``dset_to`` grid.
    :affine_suffix:     Suffix applied to ``dset_from`` to name the new dataset, as well as
                        the ``.1D`` file.
    :suffix:            Suffix applied to the final ``dset_from`` dataset. An additional file
                        with the additional suffix ``_WARP`` will be created containing the parameters
                        (e.g., with the default ``_qwarp`` suffix, the parameters will be in a file with
                        the suffix ``_qwarp_WARP``)
    :prefix:            Alternatively to ``suffix``, explicitly give the full output filename

    The output affine dataset and 1D, as well as the output of qwarp are named by adding
    the given suffixes (``affine_suffix`` and ``qwarp_suffix``) to the ``dset_from`` file

    If ``skull_strip`` is a string instead of ``True``/``False``, it will only skull strip the given
    dataset instead of both of them

    # TODO: currently does not work with +tlrc datasets because the filenames get mangled
    '''

    dset_ss = lambda dset: os.path.split(nl.suffix(dset,'_ns'))[1]
    dset_u = lambda dset: os.path.split(nl.suffix(dset,'_u'))[1]
    def dset_source(dset):
        if skull_strip==True or skull_strip==dset:
            return dset_ss(dset)
        else:
            return dset

    dset_affine = os.path.split(nl.suffix(dset_from,affine_suffix))[1]
    dset_affine_1D = nl.prefix(dset_affine) + '.1D'
    dset_qwarp = prefix
    if dset_qwarp==None:
        dset_qwarp = os.path.split(nl.suffix(dset_from,suffix))[1]

    if os.path.exists(dset_qwarp):
        # final product already exists
        return

    affine_align(dset_from,dset_to,skull_strip,mask,affine_suffix)

    for dset in [dset_from,dset_to]:
        nl.run([
            '3dUnifize',
            '-prefix', dset_u(dset_source(dset)),
            '-input', dset_source(dset)
        ],products=[dset_u(dset_source(dset))])

    mask_use = mask
    if mask:
        # the mask was probably made in the space of the original dset_to anatomy,
        # which has now been cropped from the skull stripping. So the lesion mask
        # needs to be resampled to match the corresponding mask
        if skull_strip==True or skull_strip==dset_to:
            nl.run(['3dresample','-master',dset_u(dset_ss(dset)),'-inset',mask,'-prefix',nl.suffix(mask,'_resam')],products=nl.suffix(mask,'_resam'))
            mask_use = nl.suffix(mask,'_resam')

    warp_cmd = [
        '3dQwarp',
        '-prefix', dset_qwarp,
        '-duplo', '-useweight', '-blur', '0', '3',
        '-iwarp',
        '-base', dset_u(dset_source(dset_to)),
        '-source', dset_affine
    ]

    if mask:
        warp_cmd += ['-emask', mask_use]

    nl.run(warp_cmd,products=dset_qwarp)
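A hedged usage sketch for qwarp_align (hypothetical filenames; assumes import neural as nl):

import neural as nl

# non-linearly warp a subject anatomy to a template, skull-stripping only the anatomy
nl.qwarp_align('anat+orig', 'template+orig', skull_strip='anat+orig')
# products: anat_aff+orig (affine step), anat_qwarp+orig, and anat_qwarp_WARP+orig (warp parameters)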
Code example #29
File: decon.py  Project: azraq27/neural
def smooth_decon_to_fwhm(decon,fwhm,cache=True):
    '''takes an input :class:`Decon` object and uses ``3dBlurToFWHM`` to make the output as close as possible to ``fwhm``
    returns the final measured fwhm. If ``cache`` is ``True``, will save the blurred input file (and use it again in the future)'''
    if os.path.exists(decon.prefix):
        return
    blur_dset = lambda dset: nl.suffix(dset,'_smooth_to_%.2f' % fwhm)

    with nl.notify('Running smooth_decon_to_fwhm analysis (with %.2fmm blur)' % fwhm):
        tmpdir = tempfile.mkdtemp()
        try:
            cwd = os.getcwd()
            random_files = [re.sub(r'\[\d+\]$','',str(x)) for x in nl.flatten([x for x in decon.__dict__.values() if isinstance(x,basestring) or isinstance(x,list)]+[x.values() for x in decon.__dict__.values() if isinstance(x,dict)])]
            files_to_copy = [x for x in random_files if os.path.exists(x) and x[0]!='/']
            files_to_copy += [blur_dset(dset) for dset in decon.input_dsets if os.path.exists(blur_dset(dset))]
            # copy the input and supporting files into the temp directory
            for file in files_to_copy:
                try:
                    shutil.copytree(file,tmpdir)
                except OSError as e:
                    shutil.copy(file,tmpdir)

            copyback_files = [decon.prefix,decon.errts]
            with nl.run_in(tmpdir):
                if os.path.exists(decon.prefix):
                    os.remove(decon.prefix)

                # Create the blurred inputs (or load from cache)
                if cache and all([os.path.exists(os.path.join(cwd,blur_dset(dset))) for dset in decon.input_dsets]):
                    # Everything is already cached...
                    nl.notify('Using cache\'d blurred datasets')
                else:
                    # Need to make them from scratch
                    with nl.notify('Creating blurred datasets'):
                        old_errts = decon.errts
                        decon.errts = 'residual.nii.gz'
                        decon.prefix = os.path.basename(decon.prefix)
                        # Run once in place to get the residual dataset
                        decon.run()
                        running_reps = 0
                        for dset in decon.input_dsets:
                            info = nl.dset_info(dset)
                            residual_dset = nl.suffix(dset,'_residual')
                            nl.run(['3dbucket','-prefix',residual_dset,'%s[%d..%d]'%(decon.errts,running_reps,running_reps+info.reps-1)],products=residual_dset)
                            cmd = ['3dBlurToFWHM','-quiet','-input',dset,'-blurmaster',residual_dset,'-prefix',blur_dset(dset),'-FWHM',fwhm]
                            if decon.mask:
                                if decon.mask=='auto':
                                    cmd += ['-automask']
                                else:
                                    cmd += ['-mask',decon.mask]
                            nl.run(cmd,products=blur_dset(dset))
                            running_reps += info.reps
                            if cache:
                                copyback_files.append(blur_dset(dset))
                    decon.errts = old_errts
                decon.input_dsets = [blur_dset(dset) for dset in decon.input_dsets]
                for d in [decon.prefix,decon.errts]:
                    if os.path.exists(d):
                        try:
                            os.remove(d)
                        except:
                            pass
                decon.run()
                for copyfile in copyback_files:
                    if os.path.exists(copyfile):
                        shutil.copy(copyfile,cwd)
                    else:
                        nl.notify('Warning: deconvolve did not produce expected file %s' % decon.prefix,level=nl.level.warning)
        except:
            raise
        finally:
            shutil.rmtree(tmpdir,True)
Code example #30
File: afni.py  Project: alistairwalsh/neural
def tshift(dset, suffix='_tshift', initial_ignore=3):
    nl.run([
        '3dTshift', '-prefix',
        nl.suffix(dset, suffix), '-ignore', initial_ignore, dset
    ],
           products=nl.suffix(dset, suffix))
Code example #31
def blur(dset, fwhm, prefix=None):
    '''blurs ``dset`` with given ``fwhm`` runs 3dmerge to blur dataset to given ``fwhm``
    default ``prefix`` is to suffix ``dset`` with ``_blur%.1fmm``'''
    if prefix == None:
        prefix = nl.suffix(dset, '_blur%.1fmm' % fwhm)
    return available_method('blur')(dset, fwhm, prefix)
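A hedged usage sketch chaining blur and cluster (hypothetical filenames; assumes import neural as nl and that both helpers are exposed at the package level):

import neural as nl

nl.blur('zstat+orig', 6.0)                  # -> zstat_blur6.0mm+orig
nl.cluster('zstat_blur6.0mm+orig', 2, 40)   # -> zstat_blur6.0mm_clust40+orig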
Code example #32
def qwarp_align(dset_from,
                dset_to,
                skull_strip=True,
                mask=None,
                affine_suffix='_aff',
                suffix='_qwarp',
                prefix=None):
    '''aligns ``dset_from`` to ``dset_to`` using 3dQwarp
    
    Will run ``3dSkullStrip`` (unless ``skull_strip`` is ``False``), ``3dUnifize``,
    ``3dAllineate``, and then ``3dQwarp``. This method will add suffixes to the input
    dataset for the intermediate files (e.g., ``_ss``, ``_u``). If those files already
    exist, it will assume they were intelligently named, and use them as is
    
    :skull_strip:       If True/False, turns skull-stripping of both datasets on/off.
                        If a string matching ``dset_from`` or ``dset_to``, will only
                        skull-strip the given dataset
    :mask:              Applies the given mask to the alignment. Because of the nature
                        of the alignment algorithms, the mask is **always** applied to
                        the ``dset_to``. If this isn't what you want, you need to reverse
                        the transform and re-apply it (e.g., using :meth:`qwarp_invert` 
                        and :meth:`qwarp_apply`). If the ``dset_to`` dataset is skull-stripped,
                        the mask will also be resampled to match the ``dset_to`` grid.
    :affine_suffix:     Suffix applied to ``dset_from`` to name the new dataset, as well as
                        the ``.1D`` file.
    :suffix:            Suffix applied to the final ``dset_from`` dataset. An additional file
                        with the additional suffix ``_WARP`` will be created containing the parameters
                        (e.g., with the default ``_qwarp`` suffix, the parameters will be in a file with
                        the suffix ``_qwarp_WARP``)
    :prefix:            Alternatively to ``suffix``, explicitly give the full output filename
    
    The output affine dataset and 1D, as well as the output of qwarp are named by adding
    the given suffixes (``affine_suffix`` and ``qwarp_suffix``) to the ``dset_from`` file
    
    If ``skull_strip`` is a string instead of ``True``/``False``, it will only skull strip the given
    dataset instead of both of them
    
    # TODO: currently does not work with +tlrc datasets because the filenames get mangled
    '''

    dset_ss = lambda dset: os.path.split(nl.suffix(dset, '_ns'))[1]
    dset_u = lambda dset: os.path.split(nl.suffix(dset, '_u'))[1]

    def dset_source(dset):
        if skull_strip == True or skull_strip == dset:
            return dset_ss(dset)
        else:
            return dset

    dset_affine = os.path.split(nl.suffix(dset_from, affine_suffix))[1]
    dset_affine_1D = nl.prefix(dset_affine) + '.1D'
    dset_qwarp = prefix
    if dset_qwarp == None:
        dset_qwarp = os.path.split(nl.suffix(dset_from, suffix))[1]

    if os.path.exists(dset_qwarp):
        # final product already exists
        return

    affine_align(dset_from, dset_to, skull_strip, mask, affine_suffix)

    for dset in [dset_from, dset_to]:
        nl.run([
            '3dUnifize', '-prefix',
            dset_u(dset_source(dset)), '-input',
            dset_source(dset)
        ],
               products=[dset_u(dset_source(dset))])

    mask_use = mask
    if mask:
        # the mask was probably made in the space of the original dset_to anatomy,
        # which has now been cropped from the skull stripping. So the lesion mask
        # needs to be resampled to match the corresponding mask
        if skull_strip == True or skull_strip == dset_to:
            nl.run([
                '3dresample', '-master',
                dset_u(dset_ss(dset)), '-inset', mask, '-prefix',
                nl.suffix(mask, '_resam')
            ],
                   products=nl.suffix(mask, '_resam'))
            mask_use = nl.suffix(mask, '_resam')

    warp_cmd = [
        '3dQwarp', '-prefix', dset_qwarp, '-duplo', '-useweight', '-blur', '0',
        '3', '-iwarp', '-base',
        dset_u(dset_source(dset_to)), '-source', dset_affine
    ]

    if mask:
        warp_cmd += ['-emask', mask_use]

    nl.run(warp_cmd, products=dset_qwarp)
Code example #33
File: common.py  Project: azraq27/neural
def cluster(dset,min_distance,min_cluster_size,prefix=None):
    '''clusters given ``dset`` connecting voxels ``min_distance``mm away with minimum cluster size of ``min_cluster_size``
    default prefix is ``dset`` suffixed with ``_clust%d``'''
    if prefix==None:
        prefix = nl.suffix(dset,'_clust%d' % min_cluster_size)
    return available_method('cluster')(dset,min_distance,min_cluster_size,prefix)
Code example #34
File: common.py  Project: azraq27/neural
def blur(dset,fwhm,prefix=None):
    '''blurs ``dset`` with given ``fwhm`` runs 3dmerge to blur dataset to given ``fwhm``
    default ``prefix`` is to suffix ``dset`` with ``_blur%.1fmm``'''
    if prefix==None:
        prefix = nl.suffix(dset,'_blur%.1fmm'%fwhm)
    return available_method('blur')(dset,fwhm,prefix)
Code example #35
File: afni.py  Project: azraq27/neural
def tshift(dset,suffix='_tshift',initial_ignore=3):
    nl.run(['3dTshift','-prefix',nl.suffix(dset,suffix),'-ignore',initial_ignore,dset],products=nl.suffix(dset,suffix))