Example #1
def run_toafni(config, env):
    '''convert surfaces to AFNI (or SUMA, rather) format'''
    cmds = []

    sd = config['sumadir']
    sid = config['sid']

    if sid is None:
        raise ValueError("Subject id is not set, cannot continue")
    fs_sid = config['fs_sid']

    # files that should exist if Make_Spec_FS was run successfully
    checkfns = ['brainmask.nii', 'T1.nii', 'aseg.nii']

    filesexist = all([os.path.exists('%s/%s' % (sd, fn)) for fn in checkfns])

    if config['overwrite'] or not filesexist:
        if config['overwrite']:
            if filesexist:
                cmds.append('rm -rf "%s"' % sd)
        cmds.append('cd %(surfdir)s;@SUMA_Make_Spec_FS -sid %(sid)s -no_ld' %
                    config)
        utils.run_cmds(cmds, env)
    else:
        print "SUMA conversion appears to have been performed already for %s in %s" % (
            sid, sd)
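A minimal invocation sketch for the function above (illustrative only: the paths, subject id and empty env dict are placeholders, and only the config keys actually read by run_toafni are set):

# hypothetical usage, not part of the original source
config = {
    'sumadir': '/data/study/subj01/SUMA',   # checked for @SUMA_Make_Spec_FS output
    'surfdir': '/data/study/subj01/surf',   # directory in which @SUMA_Make_Spec_FS is run
    'sid': 'subj01',
    'fs_sid': 'subj01',
    'overwrite': False,
}
env = {}  # passed through to utils.run_cmds
run_toafni(config, env)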
Example #2
def run_toafni(config, env):
    '''convert surfaces to AFNI (or SUMA, rather) format'''
    cmds = []

    sd = config['sumadir']
    sid = config['sid']

    if sid is None:
        raise ValueError("Subject id is not set, cannot continue")
    fs_sid = config['fs_sid']

    # files that should exist if Make_Spec_FS was run successfully
    checkfns = ['brainmask.nii',
              'T1.nii',
              'aseg.nii']

    filesexist = all([os.path.exists('%s/%s' % (sd, fn)) for fn in checkfns])

    if config['overwrite'] or not filesexist:
        if config['overwrite']:
            if filesexist:
                cmds.append('rm -rf "%s"' % sd)
        cmds.append('cd %(surfdir)s;@SUMA_Make_Spec_FS -sid %(sid)s -no_ld' % config)
        utils.run_cmds(cmds, env)
    else:
        print "SUMA conversion appears to have been performed already for %s in %s" % (sid, sd)
Example #3
def run_mapico(config, env):
    '''run MapIcosahedron to convert surfaces to standard topology'''
    sumadir = config['sumadir']
    firstcmd = 'cd "%s" || exit 1' % sumadir
    cmds = []
    icolds, hemis = _get_hemis_icolds(config)
    sid = config['sid']  # subject id
    ext = '.asc'  # input is always ascii
    for icold in icolds:
        icoprefix = config['mi_icopat'] % icold
        spherefns = []
        for hemi in hemis:
            if not config['overwrite']:
                # the last file that is generated by MapIcosahedron
                lastsurffn = '%s/%s%sh.sphere.reg%s' % (
                    sumadir, config['mi_icopat'] % icold, hemi, ext)
                spherefns.append(lastsurffn)
                if os.path.exists(lastsurffn):
                    print(
                        "Seems MapIcosahedron was already run for %sh with ld=%d"
                        % (hemi, icold))
                    continue

            cmd = (
                'MapIcosahedron -overwrite -spec %s_%sh.spec -ld %d -fix_cut_surfaces -prefix %s'
                % (sid, hemi, icold, icoprefix))
            cmds.append(cmd)
        if cmds:
            cmd = '%s;%s' % (firstcmd, ';'.join(cmds))
            utils.run_cmds(cmd, env)
            cmds = []
        if len(spherefns) == 2 and 'l' in hemis and 'r' in hemis:
            spheres = map(surf.read, spherefns)

            mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
            mappathfn = os.path.join(sumadir, mapfn)

            if config['overwrite'] or not os.path.exists(mappathfn):
                eps = .001
                print "Computing bijection between nodes (ico=%d) - this may take a while" % icold
                bijection = surf.get_sphere_left_right_mapping(
                    spheres[0], spheres[1], eps)

                with open(mappathfn, 'w') as f:
                    f.write('\n'.join(map(str, bijection)))

                    print "Written bijection to %s" % mappathfn
Example #4
def run_mapico(config, env):
    """run MapIcosehedron to convert surfaces to standard topology"""
    sumadir = config["sumadir"]
    firstcmd = 'cd "%s" || exit 1' % sumadir
    cmds = []
    icolds, hemis = _get_hemis_icolds(config)
    sid = config["sid"]  # subject id
    ext = ".asc"  # input is always ascii
    for icold in icolds:
        icoprefix = config["mi_icopat"] % icold
        spherefns = []
        for hemi in hemis:
            if not config["overwrite"]:
                # the last file that is generated by MapIcosahedron
                lastsurffn = "%s/%s%sh.sphere.reg%s" % (sumadir, config["mi_icopat"] % icold, hemi, ext)
                spherefns.append(lastsurffn)
                if os.path.exists(lastsurffn):
                    print ("Seems MapIcosahedron was already run for %sh with ld=%d" % (hemi, icold))
                    continue

            cmd = "MapIcosahedron -overwrite -spec %s_%sh.spec -ld %d -fix_cut_surfaces -prefix %s" % (
                sid,
                hemi,
                icold,
                icoprefix,
            )
            cmds.append(cmd)
        if cmds:
            cmd = "%s;%s" % (firstcmd, ";".join(cmds))
            utils.run_cmds(cmd, env)
            cmds = []
        if len(spherefns) == 2 and "l" in hemis and "r" in hemis:
            spheres = map(surf.read, spherefns)

            mapfn = (config["mi_icopat"] % icold) + config["hemimappingsuffix"]
            mappathfn = pathjoin(sumadir, mapfn)

            if config["overwrite"] or not os.path.exists(mappathfn):
                eps = 0.001
                print "Computing bijection between nodes (ico=%d) - this may take a while" % icold
                bijection = surf.get_sphere_left_right_mapping(spheres[0], spheres[1], eps)

                with open(mappathfn, "w") as f:
                    f.write("\n".join(map(str, bijection)))

                    print "Written bijection to %s" % mappathfn
Example #5
def run_mapico(config, env):
    '''run MapIcosahedron to convert surfaces to standard topology'''
    sumadir = config['sumadir']
    firstcmd = 'cd "%s" || exit 1' % sumadir
    cmds = []
    icolds, hemis = _get_hemis_icolds(config)
    sid = config['sid'] # subject id
    for icold in icolds:
        icoprefix = config['mi_icopat'] % icold
        spherefns = []
        for hemi in hemis:
            if not config['overwrite']:
                # the last file that is generated by MapIcosahedron
                lastsurffn = '%s/%s%sh.sphere.reg.asc' % (sumadir, config['mi_icopat'] % icold, hemi)
                spherefns.append(lastsurffn)
                if os.path.exists(lastsurffn):
                    print("Seems MapIcosahedron was already run for %sh with ld=%d" % (hemi, icold))
                    continue

            cmd = ('MapIcosahedron -overwrite -spec %s_%sh.spec -ld %d -prefix %s' %
                       (sid, hemi, icold, icoprefix))
            cmds.append(cmd)
        if cmds:
            cmd = '%s;%s' % (firstcmd, ';'.join(cmds))
            utils.run_cmds(cmd, env)
            cmds = []
        if len(spherefns) == 2 and 'l' in hemis and 'r' in hemis:
            spheres = map(surf.read, spherefns)

            mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
            mappathfn = os.path.join(sumadir, mapfn)


            if config['overwrite'] or not os.path.exists(mappathfn):
                eps = .001
                print "Computing bijection between nodes (ico=%d) - this may take a while" % icold
                bijection = surf_fs_asc.sphere_reg_leftrightmapping(spheres[0],
                                                                spheres[1],
                                                                eps)

                with open(mappathfn, 'w') as f:
                    f.write('\n'.join(map(str, bijection)))

                    print "Written bijection to %s" % mappathfn
Example #6
def compute_fwhm(config):
    # helper function - called by critical_clustersize
    # computes FWHM of residuals of input data and stores in config
    c = config  # local alias; the body below refers to the config dict as 'c'
    output_dir = c['output_dir']

    is_surf = _is_surf(config)
    ext, ext1D = _ext(config), _ext(config, for1D=True)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)


    cmds = ['cd "%s"' % output_dir]

    # if surfaces and needs padding, do that first
    pad_to_node = config['pad_to_node']
    if is_surf and pad_to_node:
        data_files = []

        for i, fn in enumerate(c['data_files']):
            fn_pad = 'pad_%d%s' % (i, ext)
            cmds.append("; ConvertDset -overwrite -pad_to_node %d -input %s'[%d]' -prefix ./%s" %
                                (pad_to_node, fn, config['brik_index'], fn_pad))
            data_files.append(fn_pad)
        pad_files = data_files
        brik_index = 0
    else:
        data_files = c['data_files']
        pad_files = []
        brik_index = c['brik_index']

    # bucket data from all participants into a single file
    buck_fn = _fn(config, 'buck')

    cmds.append('; 3dbucket -overwrite -prefix %s' % buck_fn)
    for fn in data_files:
        cmds.append(" %s'[%d]'" % (fn, brik_index))

    # also store as 1D (won't hurt)
    if is_surf:
        buck_fn_1D = _fn(config, 'buck', ext1D)
        cmds.append('; ConvertDset -overwrite -o_1D -prefix %s -input %s' %
                    (buck_fn_1D, buck_fn))
    else:
        buck_fn_1D = buck_fn

    # compute group mean
    mean_fn = _fn(config, 'mean')
    cmds.append('; 3dTstat -overwrite -prefix %s %s' % (mean_fn, buck_fn))

    # compute residuals, and estimate FWHM for each of them
    # store FWHM output in fwhm_fn
    fwhm_fn = pathjoin(output_dir, _fn(config, 'fwhm', '.1D'))
    cmds.append('; echo > "%s"' % fwhm_fn)

    resid_fns = []
    for i in xrange(len(c['data_files'])):
        fn = _fn(config, 'resid_%d' % i)
        cmds.append("; 3dcalc -overwrite -prefix %s -a %s -b %s'[%d]' -expr 'a-b'"
                    % (fn, mean_fn, buck_fn, i))
        msk = _mask_expr(config)
        if is_surf:
            surf_fn = c['surface_file']
            cmds.append("; SurfFWHM %s -input %s -i_fs %s"
                        "| grep ^FWHM  | cut -f2 -d'=' >> '%s'" %
                        (msk, fn, surf_fn, fwhm_fn))
        else:
            cmds.append('; 3dFWHMx %s %s | cut -c18- >> %s' % (msk, fn, fwhm_fn))
        resid_fns.append(fn)

    cmd = ''.join(cmds)
    utils.run_cmds(cmd)

    # read FWHM values and store in config
    with open(fwhm_fn) as f:
        fwhms = f.read().split()

    print fwhms
    print fwhm_fn

    config['all_fwhms'] = fwhms # all FWHMs (for each participant)
    config['fwhm'] = sum(map(float, fwhms)) / len(fwhms) # average FWHM
    config['buck_fn'] = buck_fn
    config['buck_fn_1D'] = buck_fn_1D

    mean_fwhm_fn = pathjoin(output_dir, _fn(config, 'mean_fwhm', '.1D'))
    with open(mean_fwhm_fn, 'w') as f:
        f.write('%.3f\n' % config['fwhm'])

    tmpfns = resid_fns + pad_files + [mean_fn]
    print "TEMP"
    print tmpfns
    _remove_files(config, tmpfns)
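A reading note on how the shell command is assembled above, shown as comments:

# Each fragment appended to cmds after the first begins with '; ', so
# ''.join(cmds) produces a single shell line of the form
#   cd "<output_dir>"; ConvertDset ...; 3dbucket ...; 3dTstat ...; 3dcalc ...; ...
# which utils.run_cmds then executes in one go.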
Example #7
def apply_clustersize(config):
    # applies the critical cluster size to the original data
    #
    # assumes that critical_clustersize(config) has been run

    output_dir = config['output_dir']
    pthr = config['pthr']
    tthr = config['tthr']
    niter = config['niter']
    buck_fn_1D = config['buck_fn_1D']
    is_surf = _is_surf(config)

    if is_surf:
        surf_fn = config['surface_file']

    cmds = ['cd "%s"' % output_dir]

    # run ttest on original data
    infix = 'ttest_t%(tthr)s' % config
    ttest_fn = _fn(config, infix)
    msk = _mask_expr(config)

    # NOTE: for surfaces, apply mask below (SurfClust)
    #       but in volumes, apply it here
    if is_surf:
        cmds.append('; 3dttest++ -ok_1D_text -overwrite -prefix %s -setA %s' % (ttest_fn, buck_fn_1D))
    else:
        cmds.append('; 3dttest++ %s -overwrite -prefix %s -setA %s' % (msk, ttest_fn, buck_fn_1D))

    # sort cluster sizes
    clsize = list(config['max_size'])
    clsize.sort()

    # get critical cluster size
    idx = _critical_size_index(config)
    critical_size = clsize[idx]

    print "critical size %s (p=%s)" % (critical_size, pthr)

    # apply critical size to t-test of original data
    infix += '_clustp%s_%dit' % (pthr, niter)

    if not is_surf:
        # for surfaces the size is included in the filename automatically
        infix += '_%svx' % critical_size

    # set file names
    dset_out = _fn(config, infix)
    log_out = _fn(config, infix, '.txt')

    if is_surf:
        cmds.append('; SurfClust %s -i_fs %s -input %s 1 -rmm -1 '
                    ' -thresh %f -thresh_col 1 -amm2 %f -out_clusterdset -prefix %s > %s' %
                        (msk, surf_fn, ttest_fn, tthr, critical_size, dset_out, log_out))
    else:
        dset_out_msk = _fn(config, infix + '_msk')
        cmds.append("; 3dclust -overwrite -1noneg -1clip %f  "
                    " -prefix %s -savemask %s 0 -%f %s'[1]' > %s" %
                    (tthr, dset_out, dset_out_msk, critical_size, ttest_fn, log_out))

    cmd = "".join(cmds)
    utils.run_cmds(cmd)
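The cluster-size threshold above is looked up via _critical_size_index, a helper not shown in these examples. A hedged sketch of what it plausibly computes, assuming config['max_size'] holds the niter maximum null cluster sizes; the name and rounding below are guesses, not the original code:

def _critical_size_index_sketch(config):
    # pick an index near the (1 - pthr) quantile of the ascending null sizes
    niter, pthr = config['niter'], config['pthr']
    return min(niter - 1, int((1.0 - pthr) * niter))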
Example #8
def run_skullstrip(config, env):

    if config['identity']:
        return

    overwrite = config['overwrite']
    refdir = config['refdir']
    cmds = []
    if not os.path.exists(refdir):
        cmds.append('mkdir %(refdir)s' % config)

    sumadir = config['sumadir']
    sid = config['sid']
    fs_sid = config['fs_sid']

    if not sid:
        raise ValueError("Subject id is not set, cannot continue")

    # process the surfvol anatomical.
    # because it's already skull stripped by freesurfer
    # simply copy it over; rename brain.nii to surfvol_ss
    surfvol_srcs = ['%s/%s' % (sumadir, fn)
                  for fn in ['brain.nii',
                             'T1.nii']]

    surfvol_trgs = ['%s/%s' % (refdir, fn)
                  for fn in ['%s_SurfVol_ss+orig.HEAD' % sid,
                             '%s_SurfVol+orig.HEAD' % sid]]

    for src, trg in zip(surfvol_srcs, surfvol_trgs):
        if os.path.exists(trg) and not overwrite:
            print '%s already exists' % trg
        else:
            t_p, t_n, t_o, t_e = utils.afni_fileparts(trg)
            trg_short = '%s%s' % (t_n, t_o)
            cmds.append('cd "%s"; 3dcopy -overwrite %s ./%s' %
                        (refdir, src, trg_short))

    # process experimental volume.
    expvol_src = config['expvol']
    do_ss = config['expvol_ss']
    [e_p, e_n, e_o, e_e] = utils.afni_fileparts(expvol_src)

    expvol_trg_prefix = '%s%s' % (e_n, config['sssuffix'] if do_ss else '')

    if 'nii' in e_e:
        # ensure e_n+orig is in refdir
        if overwrite or not utils.afni_fileexists('%s/%s+orig.HEAD' % (refdir, e_n)):
            print "Converting %s from NIFTI to AFNI format" % e_n
            cmds.append('cd "%s"; 3dbucket -overwrite -prefix ./%s+orig %s' % (refdir, e_n, expvol_src))
            cmds.append('if [ -e %s/%s+tlrc.HEAD ]; then 3drefit -view orig -space ORIG %s/%s+tlrc; else echo "File in orig orientation - no refit necessary"; fi' % (refdir, e_n, refdir, e_n))

        expvol_src = '%s/%s+orig.HEAD' % (refdir, e_n)

    expvol_trg = '%s/%s+orig.HEAD' % (refdir, expvol_trg_prefix)

    print "Attempt %s -> %s" % (expvol_src, expvol_trg)

    if overwrite or not utils.afni_fileexists(expvol_trg):
        if do_ss:
            cmds.append('cd "%s";3dSkullStrip -overwrite -prefix ./%s+orig -input %s' %
                            (refdir, expvol_trg_prefix, expvol_src))
        else:
            cmds.append('cd "%s";3dbucket -overwrite -prefix ./%s+orig %s' %
                            (refdir, expvol_trg_prefix, expvol_src))
    else:
        print "No skull strip because already exists: %s+orig" % expvol_trg_prefix

    utils.run_cmds(cmds, env)
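A note on the four-way unpacking of utils.afni_fileparts used here and in the alignment examples; the split shown is inferred from standard AFNI dataset naming and from how e_n, e_o and e_e are used, not from a documented signature:

# inferred convention, for illustration only:
# '/data/anat+orig.HEAD'  ->  ('/data', 'anat', '+orig', '.HEAD')
# '/data/epi.nii.gz'      ->  ('/data', 'epi', '', '.nii.gz')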
Example #9
def apply_clustersize(config):
    # applies the critical cluster size to the original data
    #
    # assumes that critical_clustersize(config) has been run

    output_dir = config['output_dir']
    pthr = config['pthr']
    tthr = config['tthr']
    niter = config['niter']
    buck_fn_1D = config['buck_fn_1D']
    is_surf = _is_surf(config)

    if is_surf:
        surf_fn = config['surface_file']

    cmds = ['cd "%s"' % output_dir]

    # run ttest on original data
    infix = 'ttest_t%(tthr)s' % config
    ttest_fn = _fn(config, infix)
    msk = _mask_expr(config)

    # NOTE: for surfaces, apply mask below (SurfClust)
    #       but in volumes, apply it here
    if is_surf:
        cmds.append('; 3dttest++ -ok_1D_text -overwrite -prefix %s -setA %s' % (ttest_fn, buck_fn_1D))
    else:
        cmds.append('; 3dttest++ %s -overwrite -prefix %s -setA %s' % (msk, ttest_fn, buck_fn_1D))

    # sort cluster sizes
    clsize = list(config['max_size'])
    clsize.sort()

    # get critical cluster size
    idx = _critical_size_index(config)
    critical_size = clsize[idx]

    print("critical size %s (p=%s)" % (critical_size, pthr))

    # apply critical size to t-test of original data
    infix += '_clustp%s_%dit' % (pthr, niter)

    if not is_surf:
        # for surfaces the size is included in the filename automatically
        infix += '_%svx' % critical_size

    # set file names
    dset_out = _fn(config, infix)
    log_out = _fn(config, infix, '.txt')

    if is_surf:
        cmds.append('; SurfClust %s -i_fs %s -input %s 1 -rmm -1 '
                    ' -thresh %f -thresh_col 1 -amm2 %f -out_clusterdset -prefix %s > %s' %
                        (msk, surf_fn, ttest_fn, tthr, critical_size, dset_out, log_out))
    else:
        dset_out_msk = _fn(config, infix + '_msk')
        cmds.append("; 3dclust -overwrite -1noneg -1clip %f  "
                    " -prefix %s -savemask %s 0 -%f %s'[1]' > %s" %
                    (tthr, dset_out, dset_out_msk, critical_size, ttest_fn, log_out))

    cmd = "".join(cmds)
    utils.run_cmds(cmd)
Example #10
def afni_niml_zscore_makefull_wizard(cfg):
    # ensure we have access to the AFNI binary
    instacorrbin = '3dSetupGroupInCorr'
    if utils.which(instacorrbin) is None:
        raise ValueError("could not locate binary %r" % instacorrbin)

    # get configuration values
    fns = cfg['filenames']
    prefix = cfg['prefix']

    group_prefix = cfg['group_prefix']
    group_postfix = cfg.get('grouppostfix', None)
    if group_postfix is None:
        group_postfix = _smart_filename_decode(fns, 'postfix')
        if not group_postfix is None:
            print "Using automatic postfix %s" % group_postfix

    overwrite = cfg['overwrite']

    # figure out which node index to pad to
    pad_to_node = cfg.get('pad_to_node', None)

    if pad_to_node is None:
        pad_to_ico_ld = cfg.get('pad_to_ico_ld', None)
        if pad_to_ico_ld is None:
            pad_to_ico_ld = _smart_filename_decode(fns, 'pad_to_ico_ld')
            if not pad_to_ico_ld is None:
                pad_to_node = pad_to_ico_ld * pad_to_ico_ld * 10 + 2
                print "Using automatic pad_to_ico_ld=%r, pad_to_node=%r" % (pad_to_ico_ld, pad_to_node)

    if pad_to_node:
        pad_to_node = int(pad_to_node)

    # process each of the input files            
    fnouts = []
    for fn in fns:
        [pth, nm] = os.path.split(fn)

        fnout = os.path.join(pth, prefix + nm)

        if os.path.exists(fnout) and not overwrite:
            print("Output file %s already exists, skipping (use '--overwrite' to override)" % fnout)
        else:
            afni_niml_zscore_makefull(fn, fnout, pad_to_node=pad_to_node)
            print "Converted %s -> %s (in %s)" % (nm, prefix + nm, pth)

        fnouts.append(fnout)

    [pth, nm] = os.path.split(group_prefix)

    ext = '.niml.dset'

    if nm.endswith(ext):
        nm = nm[:len(nm) - len(ext)]

    fullprefix = nm if group_postfix is None else '%s%s' % (nm, group_postfix)
    fullname = os.path.join(pth, fullprefix)



    cmds = ['cd "%s"; ' % pth]

    groupfnsout = ['%s.grpincorr.%s' % (fullname, ext) for ext in ['niml', 'data']]

    if any(map(os.path.exists, groupfnsout)):
        if overwrite:
            cmds.extend('rm %s;' % fn for fn in groupfnsout)
        else:
            print("Some or all of output files (%s) already exists (use '--overwrite' to override)" % (" ".join(groupfnsout)))

    cmds.append('%s -prefix ./%s' % (instacorrbin, fullprefix))
    cmds.extend(' %s' % fn for fn in fnouts)


    cmd = "".join(cmds)

    utils.run_cmds(cmd)

    msg = ("\n\nTo view the results in SUMA:\n"
    "- open a terminal window\n"
    "- run 'cd %s; 3dGroupInCorr -setA %s.grpincorr.niml -suma'\n"
    "- open a second terminal window\n"
    "- cd to a directory with surfaces, run 'suma -spec SPECFILE.spec -niml '\n"
    "- in SUMA, select a node while holding down ctrl+shift" %
    (pth, fullprefix))

    print msg
Example #11
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses align_epi_anat.py
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    fullext = config['outvol_fullext']
    ext = config['outvol_ext']

    if config['sid'] is None:
        raise ValueError('Need sid')

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the input anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol%(outvol_fullext)s' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol%(sssuffix)s%(outvol_fullext)s' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s%s' % (refdir, e_n, fullext)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1]  # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0"  # we all like unity, don't we?

    fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix
    aloutfns = ['%s%s' % (ssalprefix, fullext),
                fullmatrixfn]  # expected output files if alignment worked
    if config['overwrite'] or not all(
        [os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
        alignedfn = '%s/%s%s' % (refdir, ssalprefix, fullext)

        if config['identity']:
            fullmatrix_content = '"MATRIX(%s)"' % unity.replace(" ", ",")

            cmd = 'cd "%s"; cat_matvec %s > %s; 3dcopy -overwrite %s%s %s%s%s' % (
                refdir, fullmatrix_content, fullmatrixfn, a_n, ext, a_n,
                alignsuffix, ext)
        else:
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = (
                '-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None'
                if config['isepi'] else
                '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None'
            )
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            if config['template']:
                aea_opts += " -Allineate_opts '-maxrot 10 -maxshf 10 -maxscl 1.5'"
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (
                refdir, alignsuffix, twovolsuffix, aea_opts)

        cmds.append(cmd)

        cmds.append(_set_vol_space_cmd(alignedfn, config))

        utils.run_cmds(cmds, env)

    else:
        print "Alignment already done - skipping"

        # run these commands first, then check if everything worked properly

    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' %
                                                     (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' %
                    (refdir, fullmatrixfn, matrixfn))

    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s%s' % (refdir, alprefix, fullext)

    newgrid = 1  # size of anatomical grid in mm. We'll have to resample, otherwise 3dWarp does
    # not respect the corners of the volume (as of April 2012)

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append(
            'cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s'
            % (refdir, newgrid, matrixfn, alprefix, surfvolfn))
        cmds.append(
            _set_vol_space_cmd('%s/%s+orig' % (refdir, alprefix), config))

    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s%s' % (f, fullext) for f in [ssalprefix, alprefix]]
    headernukefields = [
        'ALLINEATE_MATVEC_B2S_000000', 'ALLINEATE_MATVEC_S2B_000000',
        'WARPDRIVE_MATVEC_FOR_000000', 'WARPDRIVE_MATVEC_INV_000000'
    ]

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (
                refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)
    utils.run_cmds('; '.join(cmds), env)
    cmds = []

    # run AddEdge so that volumes can be inspected visually for alignment
    if config['AddEdge']:
        use_ss = config['expvol_ss']

        # ae_{e,s}_n are AddEdge names for expvol and surfvol
        ae_e_n = utils.afni_fileparts(config['expvol'])[1]
        if use_ss:
            ae_e_n += config['sssuffix']
        ae_s_n = ssalprefix  #if use_ss else alprefix

        # *_ne have the output extension as well
        ae_e_ne = ae_e_n + ext
        ae_s_ne = ae_s_n + ext

        addedge_fns = ['%s/_ae.ExamineList.log' % refdir]

        exts = ['HEAD', 'BRIK']
        orig_ext = '+orig'
        addedge_rootfns = [
            '%s_%s%%s' % (ae_e_n, postfix)
            for postfix in ['e3', 'ec', ae_s_n + '_ec']
        ]
        addedge_rootfns.extend(
            ['%s_%s%%s' % (ae_s_n, postfix) for postfix in ['e3', 'ec']])

        addedge_fns_pat = [
            '%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts
        ]

        addegde_pathfns_orig = map(lambda x: os.path.join(refdir, x % '+orig'),
                                   addedge_fns_pat) + addedge_fns
        addegde_pathfns_ext = map(lambda x: os.path.join(refdir, x % ext),
                                  addedge_fns_pat)
        addegde_exists = map(os.path.exists, addegde_pathfns_ext)
        if overwrite or not all(addegde_exists):
            ae_ns = (ae_e_n, ae_s_n)

            cmds.extend(
                map(lambda fn: 'if [ -e "%s" ]; then rm "%s"; fi' % (fn, fn),
                    addegde_pathfns_orig + addegde_pathfns_ext))
            cmds.append(';'.join(['cd %s' % refdir] + [
                _convert_vol_space_to_orig_cmd('%s/%s%s' % (refdir, n, ext))
                for n in ae_ns
            ] + ['\@AddEdge %s+orig %s+orig' % ae_ns]))

            set_space_fns = addegde_pathfns_orig + [
                '%s/%s%s.%s' % (refdir, fn, orig_ext, exts[0]) for fn in ae_ns
            ]

            for fn in set_space_fns:  #['%s/%s' % (refdir, fn % orig_ext) for fn in addedge_fns_pat]:
                if fn.endswith('.log'):
                    continue
                cmds.append('if [ -e %s ]; then %s; fi' %
                            (fn, _set_vol_space_cmd(fn, config)))

            utils.run_cmds(cmds, env)
            cmds = []

        else:
            print "AddEdge seems to have been run already"

        sid = config['sid']
        plot_slice_fns = [
            (ae_e_n + '_e3', ae_s_n + '_e3', '%s_qa_e3.png' % sid),
            (None, ae_e_n + '_' + ae_s_n + '_ec', '%s_qa_ec.png' % sid)
        ]

        plot_slice_imgfns = ['%s/%s' % (refdir, fn[-1]) for fn in plot_slice_fns]  # fn[-1] is the QA png name
        if overwrite or not all(map(os.path.exists, plot_slice_imgfns)):
            slice_dims = [0, 1, 2]
            slice_pos = [.35, .45, .55, .65]
            for fns in plot_slice_fns:
                input_fns = []
                for i, fn in enumerate(fns):
                    if fn is not None:
                        fn = '%s/%s' % (refdir, fn)
                        if i <= 1:
                            fn += ext
                    input_fns.append(fn)

                fn1, fn2, fnout = input_fns
                if not os.path.exists(fnout):
                    _make_slice_plot(fn1, fn2, fnout)
                    print "QA Image saved to %s" % fnout
                else:
                    print "Already exists: %s" % fnout
        else:
            print "QA images already exist"

    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = (
            'cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
            (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
    [icolds, hemis] = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)

    origext = '.asc'
    ext = format2extension(config)
    tp = format2type(config)
    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*%s' % (config['mi_icopat'] % icold, hemi, origext)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    if not sumafile.endswith(origext):
                        raise ValueError("%s does not end with %s" %
                                         (sumafile, origext))
                    #s = sumafile.split(".")
                    #s[len(s) - 2] += config['alsuffix'] # insert '_al' just before last dot
                    #alsumafile = ".".join(s)
                    extsumafile = sumafile[:-len(origext)]
                    alsumafile = extsumafile + config['alsuffix'] + ext

                    if config['overwrite'] or not os.path.exists(
                            '%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_%s ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, tp, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    if sumafile == ('%s.sphere.reg%s' % (pat, ext)):
                        sumaout = '%s/%s' % (refdir, extsumafile + ext)
                        if config['overwrite'] or not os.path.exists(sumaout):
                            s = surf.read('%s/%s' % (sumadir, sumafile))
                            surf.write(s, sumaout)
                            #cmds.append('cp %s/%s %s/%s' % (sumadir, sumafile, refdir, sumafile))

        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = os.path.join(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = os.path.join(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
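The LPI/RAI sign-flip above can be written as a small matrix identity. A sketch under the assumption that the affine is handled as a 3x4 NumPy array; the function name is made up for illustration:

import numpy as np

def lpi_rai_wrap(m):
    # m is a 3x4 affine [R | t]; negate x and y before and after the transform,
    # i.e. return [S*R*S | S*t] with S = diag(-1, -1, 1), mirroring the
    # cat_matvec composition used in the code above.
    s = np.diag([-1.0, -1.0, 1.0])
    r, t = m[:, :3], m[:, 3]
    return np.hstack((s.dot(r).dot(s), s.dot(t)[:, None]))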
Example #12
def run_makesurfmasks(config, env):
    refdir = config["refdir"]
    overwrite = config["overwrite"]

    if config["sid"] is None:
        raise ValueError("Need sid")

    sumfn = "%s_qa_surf_mask" % config["sid"]  # output file

    fullext = config["outvol_fullext"]
    volor = config["outvol_ext"]

    sumfn_path = "%s/%s%s" % (refdir, sumfn, fullext)
    qafn_path = "%s/%s.png" % (refdir, sumfn)
    checkfn_paths = (sumfn_path, qafn_path)
    if all(map(os.path.exists, checkfn_paths)) and not overwrite:
        print "Already exist: %s" % (", ".join(checkfn_paths))
        return

    icolds, hemis = _get_hemis_icolds(config)

    volexts = ["%s%s" % (volor, e) for e in ".HEAD", ".BRIK*"]

    sssuffix = config["sssuffix"] if config["expvol_ss"] else ""
    expvol_fn = "%s%s%s" % (utils.afni_fileparts(config["expvol"])[1], sssuffix, volor)

    # if overwrite or not os.path.exists('%s/%s' % (refdir, sv_al_nii_fn)):
    #    cmd = 'cd %s; 3dcopy -overwrite %s %s' % (refdir, sv_al_orig_fn, sv_al_nii_fn)
    #    utils.run_cmds(cmd, env)

    if hemis != ["l", "r"]:
        raise ValueError("Cannot run without left and right hemisphere")

    icold = max(icolds)

    oneDfn = "__t.1D"
    oneDtfn = "__tt.1D"  # transposed
    cmds = [
        "cd %s" % refdir,
        "1deval -1D: -num %d -expr 1 > %s" % (icold ** 2 * 10 + 1, oneDfn),
        "1dtranspose %s > %s" % (oneDfn, oneDtfn),
    ]

    utils.run_cmds(";".join(cmds), env)

    tmpfns = [oneDfn, oneDtfn]

    s2v_cmd = (
        '3dSurf2Vol -map_func mask2 -data_expr "a*%%d" -spec %%s %%s -sv %s'
        " -grid_parent %s. -prefix %%s -sdata %s -overwrite"
    ) % (expvol_fn, expvol_fn, oneDtfn)

    infix2val = {"-surf_A pial": 1, "-surf_A smoothwm": 2, "-surf_A smoothwm -surf_B pial -f_steps 20": 4}

    volfns = []
    for hemi in hemis:
        specfn = afni_suma_spec.canonical_filename(icold, hemi, config["alsuffix"])

        for infix, val in infix2val.iteritems():
            fnprefix = "__m%d_%sh" % (val, hemi)
            cmd = s2v_cmd % (val, specfn, infix, fnprefix)
            utils.run_cmds("cd %s;%s" % (refdir, cmd))
            tmpfns.extend(["%s%s" % (fnprefix, e) for e in volexts])
            volfns.append(fnprefix + volor)

    cmds = ["cd %s" % refdir]
    catfn = "__cat"
    cmds.extend(
        [
            "3dTcat -overwrite -prefix %s %s" % (catfn, " ".join(volfns)),
            "3dTstat -overwrite -sum -prefix %s %s%s" % (sumfn, catfn, volor),
        ]
    )
    tmpfns.extend(["%s%s" % (catfn, e) for e in volexts])

    cmds.extend("rm %s" % fn for fn in tmpfns)
    cmds.append('echo "Surface mask in %s"' % sumfn)

    utils.run_cmds(";".join(cmds), env)

    # make plot
    if overwrite or not os.path.exists(qafn_path):
        expvol_path = "%s/%s" % (refdir, expvol_fn)
        _make_slice_plot(expvol_path, sumfn_path, qafn_path)
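A hedged reading note on the mask values used above: 1 (pial), 2 (smoothwm) and 4 (the smoothwm-to-pial ribbon) are powers of two, so after the per-voxel 3dTstat -sum the QA volume can be read as bit flags; the decoder below is an illustration, not part of the source:

PIAL, SMOOTHWM, RIBBON = 1, 2, 4

def decode_qa_value(v):
    # which of the three surface-derived masks cover a voxel with summed value v
    return {'pial': bool(int(v) & PIAL),
            'smoothwm': bool(int(v) & SMOOTHWM),
            'ribbon': bool(int(v) & RIBBON)}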
Example #13
def afni_niml_zscore_makefull_wizard(cfg):
    # ensure we have access to the AFNI binary
    instacorrbin = '3dSetupGroupInCorr'
    if utils.which(instacorrbin) is None:
        raise ValueError("could not locate binary %r" % instacorrbin)

    # get configuration values
    fns = cfg['filenames']
    prefix = cfg['prefix']

    group_prefix = cfg['group_prefix']
    group_postfix = cfg.get('grouppostfix', None)
    if group_postfix is None:
        group_postfix = _smart_filename_decode(fns, 'postfix')
        if not group_postfix is None:
            print "Using automatic postfix %s" % group_postfix

    overwrite = cfg['overwrite']

    # figure out which node index to pad to
    pad_to_node = cfg.get('pad_to_node', None)

    if pad_to_node is None:
        pad_to_ico_ld = cfg.get('pad_to_ico_ld', None)
        if pad_to_ico_ld is None:
            pad_to_ico_ld = _smart_filename_decode(fns, 'pad_to_ico_ld')
            if not pad_to_ico_ld is None:
                pad_to_node = pad_to_ico_ld * pad_to_ico_ld * 10 + 2
                print "Using automatic pad_to_ico_ld=%r, pad_to_node=%r" % (pad_to_ico_ld, pad_to_node)

    if pad_to_node:
        pad_to_node = int(pad_to_node)

    # process each of the input files            
    fnouts = []
    for fn in fns:
        [pth, nm] = os.path.split(fn)

        fnout = pathjoin(pth, prefix + nm)

        if os.path.exists(fnout) and not overwrite:
            print("Output file %s already exists, skipping (use '--overwrite' to override)" % fnout)
        else:
            afni_niml_zscore_makefull(fn, fnout, pad_to_node=pad_to_node)
            print "Converted %s -> %s (in %s)" % (nm, prefix + nm, pth)

        fnouts.append(fnout)

    [pth, nm] = os.path.split(group_prefix)

    ext = '.niml.dset'

    if nm.endswith(ext):
        nm = nm[:len(nm) - len(ext)]

    fullprefix = nm if group_postfix is None else '%s%s' % (nm, group_postfix)
    fullname = pathjoin(pth, fullprefix)



    cmds = ['cd "%s"; ' % pth]

    groupfnsout = ['%s.grpincorr.%s' % (fullname, ext) for ext in ['niml', 'data']]

    if any(map(os.path.exists, groupfnsout)):
        if overwrite:
            cmds.extend('rm %s;' % fn for fn in groupfnsout)
        else:
            print("Some or all of output files (%s) already exists (use '--overwrite' to override)" % (" ".join(groupfnsout)))

    cmds.append('%s -prefix ./%s' % (instacorrbin, fullprefix))
    cmds.extend(' %s' % fn for fn in fnouts)


    cmd = "".join(cmds)

    utils.run_cmds(cmd)

    msg = ("\n\nTo view the results in SUMA:\n"
    "- open a terminal window\n"
    "- run 'cd %s; 3dGroupInCorr -setA %s.grpincorr.niml -suma'\n"
    "- open a second terminal window\n"
    "- cd to a directory with surfaces, run 'suma -spec SPECFILE.spec -niml '\n"
    "- in SUMA, select a node while holding down ctrl+shift" %
    (pth, fullprefix))

    print msg
Example #14
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses @SUMA_AlignToExperiment
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the input anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol+orig.HEAD' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol_ss+orig.HEAD' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s+orig.HEAD' % (refdir, e_n)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1]  # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0"  # we all like unity, don't we?
    if config['identity']:
        fullmatrixfn = '"MATRIX(%s)"' % unity.replace(" ", ",")
    else:
        fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix

        aloutfns = ['%s+orig.HEAD' % ssalprefix,
                    fullmatrixfn]  # expected output files if alignment worked

        if config['overwrite'] or not all(
            [os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = (
                '-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None'
                if config['isepi'] else
                '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None'
            )
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (
                refdir, alignsuffix, twovolsuffix, aea_opts)
            cmds.append(cmd)

        else:
            print "Alignment already done - skipping"

        # run these commands first, then check if everything worked properly
        utils.run_cmds(cmds, env)

    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' %
                                                     (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' %
                    (refdir, fullmatrixfn, matrixfn))

    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s+orig.HEAD' % (refdir, alprefix)

    newgrid = 1  # size of anatomical grid in mm. We'll have to resample, otherwise 3dWarp does
    # not respect the corners of the volume (as of April 2012)

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append(
            'cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s'
            % (refdir, newgrid, matrixfn, alprefix, surfvolfn))
    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s+orig.HEAD' % f for f in [ssalprefix, alprefix]]
    headernukefields = [
        'ALLINEATE_MATVEC_B2S_000000', 'ALLINEATE_MATVEC_S2B_000000',
        'WARPDRIVE_MATVEC_FOR_000000', 'WARPDRIVE_MATVEC_INV_000000'
    ]

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (
                refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)

    # run AddEdge so that volumes can be inspected visually for alignment
    if config['AddEdge']:
        basedset = volsin[1]
        [d, n, o, e] = utils.afni_fileparts(basedset)
        if 'nii' in e:
            o = '+orig'
            if overwrite or not os.path.exists('%s/%s+orig.HEAD' % (refdir, n)):
                cmds.append('cd %s; 3dcopy -overwrite %s.nii %s%s' %
                            (refdir, n, n, o))

        dset = '%s+orig.HEAD' % alprefix
        n_dset = utils.afni_fileparts(dset)[1]

        addedge_fns = ['_ae.ExamineList.log']

        exts = ['HEAD', 'BRIK']
        addedge_rootfns = [
            '%s_%s+orig' % (n, postfix)
            for postfix in ['e3', 'ec', n_dset + '_ec']
        ]
        addedge_rootfns.extend(
            ['%s_%s+orig' % (n_dset, postfix) for postfix in ['e3', 'ec']])

        addedge_fns = [
            '%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts
        ]

        addegde_pathfns = map(lambda x: os.path.join(refdir, x), addedge_fns)

        addegde_exists = map(os.path.exists, addegde_pathfns)
        if overwrite or not all(addegde_exists):
            if overwrite:
                cmds.extend(map(lambda fn: 'rm "%s"' % fn, addegde_pathfns))
            cmds.append('cd %s; \@AddEdge %s%s %s' % (refdir, n, o, dset))
        else:
            print "AddEdge seems to have been run already"

    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = (
            'cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
            (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
    [icolds, hemis] = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)

    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*.asc' % (config['mi_icopat'] % icold, hemi)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    s = sumafile.split(".")
                    s[len(s) - 2] += config[
                        'alsuffix']  # insert '_al' just before last dot
                    alsumafile = ".".join(s)

                    if config['overwrite'] or not os.path.exists(
                            '%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_fs ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    if sumafile == ('%s.sphere.reg.asc' % pat):
                        if config['overwrite'] or not os.path.exists(
                                '%s/%s' % (refdir, sumafile)):
                            cmds.append('cp %s/%s %s/%s' %
                                        (sumadir, sumafile, refdir, sumafile))

        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = os.path.join(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = os.path.join(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
Example #15
def run_skullstrip(config, env):

    if config['identity']:
        return

    overwrite = config['overwrite']
    refdir = config['refdir']
    cmds = []
    if not os.path.exists(refdir):
        cmds.append('mkdir %(refdir)s' % config)

    sumadir = config['sumadir']
    sid = config['sid']
    fs_sid = config['fs_sid']

    if not sid:
        raise ValueError("Subject id is not set, cannot continue")

    # process the surfvol anatomical.
    # because it's already skull stripped by freesurfer
    # simply copy it over; rename brain.nii to surfvol_ss
    surfvol_srcs = ['%s/%s' % (sumadir, fn) for fn in ['brain.nii', 'T1.nii']]

    surfvol_trgs = [
        '%s/%s' % (refdir, fn) for fn in
        ['%s_SurfVol_ss+orig.HEAD' %
         sid, '%s_SurfVol+orig.HEAD' % sid]
    ]

    for src, trg in zip(surfvol_srcs, surfvol_trgs):
        if os.path.exists(trg) and not overwrite:
            print '%s already exists' % trg
        else:
            t_p, t_n, t_o, t_e = utils.afni_fileparts(trg)
            trg_short = '%s%s' % (t_n, t_o)
            cmds.append('cd "%s"; 3dcopy -overwrite %s ./%s' %
                        (refdir, src, trg_short))

    # process experimental volume.
    expvol_src = config['expvol']
    do_ss = config['expvol_ss']
    [e_p, e_n, e_o, e_e] = utils.afni_fileparts(expvol_src)

    expvol_trg_prefix = '%s%s' % (e_n, config['sssuffix'] if do_ss else '')

    if 'nii' in e_e:
        # ensure e_n+orig is in refdir
        if overwrite or not utils.afni_fileexists('%s/%s+orig.HEAD' %
                                                  (refdir, e_n)):
            print "Converting %s from NIFTI to AFNI format" % e_n
            cmds.append('cd "%s"; 3dbucket -overwrite -prefix ./%s+orig %s' %
                        (refdir, e_n, expvol_src))
            cmds.append(
                'if [ -e %s/%s+tlrc.HEAD ]; then 3drefit -view orig -space ORIG %s/%s+tlrc; else echo "File in orig orientation - no refit necessary"; fi'
                % (refdir, e_n, refdir, e_n))

        expvol_src = '%s/%s+orig.HEAD' % (refdir, e_n)

    expvol_trg = '%s/%s+orig.HEAD' % (refdir, expvol_trg_prefix)

    print "Attempt %s -> %s" % (expvol_src, expvol_trg)

    if overwrite or not utils.afni_fileexists(expvol_trg):
        if do_ss:
            cmds.append(
                'cd "%s";3dSkullStrip -overwrite -prefix ./%s+orig -input %s' %
                (refdir, expvol_trg_prefix, expvol_src))
        else:
            cmds.append('cd "%s";3dbucket -overwrite -prefix ./%s+orig %s' %
                        (refdir, expvol_trg_prefix, expvol_src))
    else:
        print "No skull strip because already exists: %s+orig" % expvol_trg_prefix

    utils.run_cmds(cmds, env)
Example #16
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses @SUMA_AlignToExperiment
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the input anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol+orig.HEAD' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol_ss+orig.HEAD' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s+orig.HEAD' % (refdir, e_n)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1] # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0" # we all like unity, don't we?
    if config['identity']:
        fullmatrixfn = '"MATRIX(%s)"' % unity.replace(" ", ",")
    else:
        fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix

        aloutfns = ['%s+orig.HEAD' % ssalprefix, fullmatrixfn] # expected output files if alignment worked

        if config['overwrite'] or not all([os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = ('-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None' if config['isepi']
                       else '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None')
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (refdir, alignsuffix, twovolsuffix, aea_opts)
            cmds.append(cmd)

        else:
            print "Alignment already done - skipping"

        # run these commands first, then check if everything worked properly
        utils.run_cmds(cmds, env)

    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' % (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' % (refdir, fullmatrixfn, matrixfn))


    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s+orig.HEAD' % (refdir, alprefix)

    newgrid = 1 # size of anatomical grid in mm. We'll have to resample, otherwise 3dWarp does
              # not respect the corners of the volume (as of April 2012)

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append('cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s' %
                    (refdir, newgrid, matrixfn, alprefix, surfvolfn))
    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s+orig.HEAD' % f for f in [ssalprefix, alprefix]]
    headernukefields = ['ALLINEATE_MATVEC_B2S_000000',
                      'ALLINEATE_MATVEC_S2B_000000',
                      'WARPDRIVE_MATVEC_FOR_000000',
                      'WARPDRIVE_MATVEC_INV_000000']

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)

    # run AddEdge so that volumes can be inspected visually for alignment
    if config['AddEdge']:
        basedset = volsin[1]
        [d, n, o, e] = utils.afni_fileparts(basedset)
        if 'nii' in e:
            o = '+orig'
            if overwrite or not os.path.exists('%s/%s+orig.HEAD' % (refdir, n)):
                cmds.append('cd %s; 3dcopy -overwrite %s.nii %s%s' % (refdir, n, n, o))

        dset = '%s+orig.HEAD' % alprefix
        n_dset = utils.afni_fileparts(dset)[1]

        addedge_fns = ['_ae.ExamineList.log']

        exts = ['HEAD', 'BRIK']
        addedge_rootfns = ['%s_%s+orig' % (n, postfix)
                            for postfix in ['e3', 'ec', n_dset + '_ec']]
        addedge_rootfns.extend(['%s_%s+orig' % (n_dset, postfix)
                            for postfix in ['e3', 'ec']])

        addedge_fns = ['%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts]

        addegde_pathfns = map(lambda x:os.path.join(refdir, x), addedge_fns)

        addegde_exists = map(os.path.exists, addegde_pathfns)
        if overwrite or not all(addegde_exists):
            if overwrite:
                cmds.extend(map(lambda fn : 'rm "%s"' % fn, addegde_pathfns))
            cmds.append('cd %s; \@AddEdge %s%s %s' % (refdir, n, o, dset))
        else:
            print "AddEdge seems to have been run already"

    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = ('cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
             (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
    [icolds, hemis] = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)

    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*.asc' % (config['mi_icopat'] % icold, hemi)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    s = sumafile.split(".")
                    s[len(s) - 2] += config['alsuffix'] # insert '_al' just before last dot
                    alsumafile = ".".join(s)

                    if config['overwrite'] or not os.path.exists('%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_fs ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    # (compare against the literal file name; 'pat' contains wildcards
                    #  and could never match an actual file)
                    if sumafile == '%s%sh.sphere.reg.asc' % (config['mi_icopat'] % icold, hemi):
                        if config['overwrite'] or not os.path.exists('%s/%s' % (refdir, sumafile)):
                            cmds.append('cp %s/%s %s/%s' % (sumadir, sumafile, refdir, sumafile))


        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = os.path.join(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = os.path.join(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
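
# The code above repeatedly unpacks utils.afni_fileparts() into (directory, prefix,
# view, extension). The helper itself is not part of these examples; the sketch below
# is only an illustration of that behaviour (an assumption, not the actual utils
# implementation):
import os

def afni_fileparts_sketch(fn):
    '''split e.g. "/data/anat+orig.HEAD" into ["/data", "anat", "+orig", ".HEAD"]'''
    d, base = os.path.split(fn)
    ext = ''
    for known_ext in ('.HEAD', '.BRIK', '.nii.gz', '.nii'):
        if base.endswith(known_ext):
            ext = known_ext
            base = base[:-len(known_ext)]
            break
    view = ''
    for known_view in ('+orig', '+acpc', '+tlrc'):
        if base.endswith(known_view):
            view = known_view
            base = base[:-len(known_view)]
            break
    return [d, base, view, ext]
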
Example #17
0
def run_skullstrip(config, env):
    fullext = config['outvol_fullext']

    overwrite = config['overwrite']
    refdir = config['refdir']
    cmds = []
    if not os.path.exists(refdir):
        cmds.append('mkdir %(refdir)s' % config)

    sumadir = config['sumadir']
    sid = config['sid']
    fs_sid = config['fs_sid']

    if not sid:
        raise ValueError("Subject id is not set, cannot continue")

    # process the surfvol anatomical.
    # because it is already skull-stripped by FreeSurfer,
    # simply copy it over; rename brain.nii to surfvol_ss
    surfvol_srcs = ['%s/%s' % (sumadir, fn) for fn in ['brain.nii', 'T1.nii']]

    surfvol_trgs = [
        '%s/%s' % (refdir, fn) for fn in
        ['%s_SurfVol_ss%s' % (sid, fullext),
         '%s_SurfVol%s' % (sid, fullext)]
    ]

    for src, trg in zip(surfvol_srcs, surfvol_trgs):
        if os.path.exists(trg) and not overwrite:
            print '%s already exists' % trg
        else:
            t_p, t_n, t_o, t_e = utils.afni_fileparts(trg)
            trg_short = '%s%s' % (t_n, t_o)
            cmds.append(
                'cd "%s"; 3dresample -overwrite -orient LPI -inset %s -prefix ./%s'
                % (refdir, src, trg_short))
            cmds.append(
                _set_vol_space_cmd('%s/%s+orig' % (refdir, t_n), config))

    # process experimental volume.
    expvol_src = config['expvol']
    do_ss = config['expvol_ss']
    [e_p, e_n, e_o, e_e] = utils.afni_fileparts(expvol_src)

    expvol_trg_prefix = '%s%s' % (e_n, config['sssuffix'] if do_ss else '')
    expvol_trg_tmp_prefix = '__tmp_%s' % expvol_trg_prefix

    expvol_trg = '%s/%s%s' % (refdir, expvol_trg_prefix, fullext)

    print "Attempt %s -> %s" % (expvol_src, expvol_trg)

    ext = config['outvol_ext']

    if overwrite or not utils.afni_fileexists(expvol_trg):
        if do_ss:
            cmds.append(
                'cd "%s";3dSkullStrip -overwrite -prefix ./%s%s -input %s' %
                (refdir, expvol_trg_tmp_prefix, ext, expvol_src))
        else:
            cmds.append('cd "%s";3dbucket -overwrite -prefix ./%s%s %s' %
                        (refdir, expvol_trg_tmp_prefix, ext, expvol_src))
        cmds.append(
            'cd "%s"; 3dresample -overwrite -orient LPI -prefix %s -inset %s%s'
            % (refdir, expvol_trg_prefix, expvol_trg_tmp_prefix, ext))
        cmds.append('rm %s/%s*' % (refdir, expvol_trg_tmp_prefix))
        cmds.append(_set_vol_space_cmd(expvol_trg, config))
    else:
        print "No skull strip because already exists: %s%s" % (
            expvol_trg_prefix, ext)

    utils.run_cmds(cmds, env)
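
# run_skullstrip() above reads a number of keys from `config`. The dictionary below
# is a hypothetical minimal example: the key names are taken from the function body,
# but the values are purely illustrative and not from the original project.
example_skullstrip_config = {
    'refdir': '/data/study/ref',                 # output reference directory
    'sumadir': '/data/study/fs/s01/SUMA',        # SUMA directory with brain.nii and T1.nii
    'sid': 's01',                                # subject id
    'fs_sid': 's01',                             # FreeSurfer subject id
    'expvol': '/data/study/mean_epi+orig.HEAD',  # experimental volume to (maybe) skull-strip
    'expvol_ss': True,                           # skull-strip the experimental volume?
    'sssuffix': '_ss',                           # suffix for skull-stripped outputs
    'outvol_ext': '+orig',                       # AFNI view of output volumes
    'outvol_fullext': '+orig.HEAD',              # view plus header extension
    'overwrite': False,                          # overwrite existing outputs?
}
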
Example #18
0
def run_skullstrip(config, env):
    fullext = config['outvol_fullext']

    overwrite = config['overwrite']
    refdir = config['refdir']
    cmds = []
    if not os.path.exists(refdir):
        cmds.append('mkdir %(refdir)s' % config)

    sumadir = config['sumadir']
    sid = config['sid']
    fs_sid = config['fs_sid']

    if not sid:
        raise ValueError("Subject id is not set, cannot continue")

    # process the surfvol anatomical.
    # because it is already skull-stripped by FreeSurfer,
    # simply copy it over; rename brain.nii to surfvol_ss
    surfvol_srcs = ['%s/%s' % (sumadir, fn)
                  for fn in ['brain.nii',
                             'T1.nii']]

    surfvol_trgs = ['%s/%s' % (refdir, fn)
                  for fn in ['%s_SurfVol_ss%s' % (sid, fullext),
                             '%s_SurfVol%s' % (sid, fullext)]]

    for src, trg in zip(surfvol_srcs, surfvol_trgs):
        if os.path.exists(trg) and not overwrite:
            print '%s already exists' % trg
        else:
            t_p, t_n, t_o, t_e = utils.afni_fileparts(trg)
            trg_short = '%s%s' % (t_n, t_o)
            cmds.append('cd "%s"; 3dresample -overwrite -orient LPI -inset %s -prefix ./%s' %
                        (refdir, src, trg_short))
            cmds.append(_set_vol_space_cmd('%s/%s+orig' % (refdir, t_n), config))

    # process experimental volume.
    expvol_src = config['expvol']
    do_ss = config['expvol_ss']
    [e_p, e_n, e_o, e_e] = utils.afni_fileparts(expvol_src)

    expvol_trg_prefix = '%s%s' % (e_n, config['sssuffix'] if do_ss else '')
    expvol_trg_tmp_prefix = '__tmp_%s' % expvol_trg_prefix

    expvol_trg = '%s/%s%s' % (refdir, expvol_trg_prefix, fullext)

    print "Attempt %s -> %s" % (expvol_src, expvol_trg)

    ext = config['outvol_ext']

    if overwrite or not utils.afni_fileexists(expvol_trg):
        if do_ss:
            cmds.append('cd "%s";3dSkullStrip -overwrite -prefix ./%s%s -input %s' %
                            (refdir, expvol_trg_tmp_prefix, ext, expvol_src))
        else:
            cmds.append('cd "%s";3dbucket -overwrite -prefix ./%s%s %s' %
                            (refdir, expvol_trg_tmp_prefix, ext, expvol_src))
        cmds.append('cd "%s"; 3dresample -overwrite -orient LPI -prefix %s -inset %s%s' %
                            (refdir, expvol_trg_prefix, expvol_trg_tmp_prefix, ext))
        cmds.append('rm %s/%s*' % (refdir, expvol_trg_tmp_prefix))
        cmds.append(_set_vol_space_cmd(expvol_trg, config))
    else:
        print "No skull strip because already exists: %s%s" % (expvol_trg_prefix, ext)

    utils.run_cmds(cmds, env)
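
# _set_vol_space_cmd(fn, config) is called throughout these examples but not shown.
# From its usage it returns a shell command string that stamps an AFNI dataset with
# the desired space. The sketch below is an assumption about what such a helper could
# look like, not the actual implementation; 'vol_space' is a hypothetical config key.
def _set_vol_space_cmd_sketch(fn, config):
    space = config.get('vol_space', 'ORIG')  # e.g. ORIG, TLRC or MNI
    return '3drefit -space %s %s' % (space, fn)
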
Example #19
0
def run_makesurfmasks(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']

    if config['sid'] is None:
        raise ValueError('Need sid')

    sumfn = '%s_qa_surf_mask' % config['sid']  # output file

    fullext = config['outvol_fullext']
    volor = config['outvol_ext']

    sumfn_path = '%s/%s%s' % (refdir, sumfn, fullext)
    qafn_path = '%s/%s.png' % (refdir, sumfn)
    checkfn_paths = (sumfn_path, qafn_path)
    if all(map(os.path.exists, checkfn_paths)) and not overwrite:
        print "Already exist: %s" % (", ".join(checkfn_paths))
        return

    icolds, hemis = _get_hemis_icolds(config)

    volexts = ['%s%s' % (volor, e) for e in ('.HEAD', '.BRIK*')]

    sssuffix = config['sssuffix'] if config['expvol_ss'] else ''
    expvol_fn = '%s%s%s' % (utils.afni_fileparts(
        config['expvol'])[1], sssuffix, volor)

    #if overwrite or not os.path.exists('%s/%s' % (refdir, sv_al_nii_fn)):
    #    cmd = 'cd %s; 3dcopy -overwrite %s %s' % (refdir, sv_al_orig_fn, sv_al_nii_fn)
    #    utils.run_cmds(cmd, env)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    icold = max(icolds)

    oneDfn = '__t.1D'
    oneDtfn = '__tt.1D'  # transposed
    cmds = [
        'cd %s' % refdir,
        '1deval -1D: -num %d -expr 1 > %s' % (icold**2 * 10 + 1, oneDfn),
        '1dtranspose %s > %s' % (oneDfn, oneDtfn)
    ]

    utils.run_cmds(';'.join(cmds), env)

    tmpfns = [oneDfn, oneDtfn]

    s2v_cmd = ('3dSurf2Vol -map_func mask2 -data_expr "a*%%d" -spec %%s %%s -sv %s'
             ' -grid_parent %s. -prefix %%s -sdata %s -overwrite') % \
                                (expvol_fn, expvol_fn, oneDtfn)

    infix2val = {
        '-surf_A pial': 1,
        '-surf_A smoothwm': 2,
        '-surf_A smoothwm -surf_B pial -f_steps 20': 4
    }

    volfns = []
    for hemi in hemis:
        specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                   config['alsuffix'])

        for infix, val in infix2val.iteritems():
            fnprefix = '__m%d_%sh' % (val, hemi)
            cmd = s2v_cmd % (val, specfn, infix, fnprefix)
            utils.run_cmds('cd %s;%s' % (refdir, cmd))
            tmpfns.extend(['%s%s' % (fnprefix, e) for e in volexts])
            volfns.append(fnprefix + volor)

    cmds = ['cd %s' % refdir]
    catfn = '__cat'
    cmds.extend([
        '3dTcat -overwrite -prefix %s %s' % (catfn, ' '.join(volfns)),
        '3dTstat -overwrite -sum -prefix %s %s%s' % (sumfn, catfn, volor)
    ])
    tmpfns.extend(['%s%s' % (catfn, e) for e in volexts])

    cmds.extend('rm %s' % fn for fn in tmpfns)
    cmds.append('echo "Surface mask in %s"' % sumfn)

    utils.run_cmds(';'.join(cmds), env)

    # make plot
    if overwrite or not os.path.exists(qafn_path):
        expvol_path = '%s/%s' % (refdir, expvol_fn)
        _make_slice_plot(expvol_path, sumfn_path, qafn_path)
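
# In run_makesurfmasks() above each mask gets a power-of-two value (pial=1,
# smoothwm=2, the smoothwm-to-pial ribbon=4) and 3dTstat -sum adds them per voxel,
# so the summed QA volume can be decoded like a bit mask. Note that both hemispheres
# use the same values, so midline voxels covered by both hemispheres can alias.
# The decoder below is illustrative only:
def decode_surf_mask_value(v):
    '''return which surface masks cover a voxel with summed value v (per hemisphere)'''
    labels = {1: 'pial', 2: 'smoothwm', 4: 'smoothwm-to-pial ribbon'}
    return [name for bit, name in labels.items() if int(v) & bit]

# e.g. decode_surf_mask_value(5) -> ['pial', 'smoothwm-to-pial ribbon']
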
Example #20
0
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses align_epi_anat.py
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    fullext = config['outvol_fullext']
    ext = config['outvol_ext']

    if config['sid'] is None:
        raise ValueError('Need sid')

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the input anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol%(outvol_fullext)s' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol%(sssuffix)s%(outvol_fullext)s' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s%s' % (refdir, e_n, fullext)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1] # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0" # we all like unity, don't we?

    fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix
    aloutfns = ['%s%s' % (ssalprefix, fullext), fullmatrixfn] # expected output files if alignment worked
    if config['overwrite'] or not all([os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
        alignedfn = '%s/%s%s' % (refdir, ssalprefix, fullext)

        if config['identity']:
            fullmatrix_content = '"MATRIX(%s)"' % unity.replace(" ", ",")

            cmd = 'cd "%s"; cat_matvec %s > %s; 3dcopy -overwrite %s%s %s%s%s' % (refdir, fullmatrix_content, fullmatrixfn, a_n, ext, a_n, alignsuffix, ext)
        else:
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = ('-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None' if config['isepi']
                       else '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None')
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            if config['template']:
                aea_opts += " -Allineate_opts '-maxrot 10 -maxshf 10 -maxscl 1.5'"
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (refdir, alignsuffix, twovolsuffix, aea_opts)

        cmds.append(cmd)

        cmds.append(_set_vol_space_cmd(alignedfn, config))

        utils.run_cmds(cmds, env)

    else:
        print "Alignment already done - skipping"

    # the alignment commands were run above; now check whether everything worked properly
    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' % (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' % (refdir, fullmatrixfn, matrixfn))


    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s%s' % (refdir, alprefix, fullext)

    newgrid = 1  # size of anatomical grid in mm. We'll have to resample, otherwise
                 # 3dWarp does not respect the corners of the volume (as of April 2012)

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append('cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s' %
                    (refdir, newgrid, matrixfn, alprefix, surfvolfn))
        cmds.append(_set_vol_space_cmd('%s/%s+orig' % (refdir, alprefix), config))

    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s%s' % (f, fullext) for f in [ssalprefix, alprefix]]
    headernukefields = ['ALLINEATE_MATVEC_B2S_000000',
                      'ALLINEATE_MATVEC_S2B_000000',
                      'WARPDRIVE_MATVEC_FOR_000000',
                      'WARPDRIVE_MATVEC_INV_000000']

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)
    utils.run_cmds('; '.join(cmds), env)
    cmds = []

    # run AddEdge so that volumes can be inspected visually for alignment
    if config['AddEdge']:
        use_ss = config['expvol_ss']

        # ae_{e,s}_n are AddEdge names for expvol and surfvol
        ae_e_n = utils.afni_fileparts(config['expvol'])[1]
        if use_ss:
            ae_e_n += config['sssuffix']
        ae_s_n = ssalprefix #if use_ss else alprefix

        # *_ne have the output extension as well
        ae_e_ne = ae_e_n + ext
        ae_s_ne = ae_s_n + ext

        addedge_fns = ['%s/_ae.ExamineList.log' % refdir]

        exts = ['HEAD', 'BRIK']
        orig_ext = '+orig'
        addedge_rootfns = ['%s_%s%%s' % (ae_e_n, postfix)
                            for postfix in ['e3', 'ec', ae_s_n + '_ec']]
        addedge_rootfns.extend(['%s_%s%%s' % (ae_s_n, postfix)
                            for postfix in ['e3', 'ec']])

        addedge_fns_pat = ['%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts]

        addegde_pathfns_orig = map(lambda x:pathjoin(refdir, x % '+orig'), addedge_fns_pat) + addedge_fns
        addegde_pathfns_ext = map(lambda x:pathjoin(refdir, x % ext), addedge_fns_pat)
        addegde_exists = map(os.path.exists, addegde_pathfns_ext)
        if overwrite or not all(addegde_exists):
            ae_ns = (ae_e_n, ae_s_n)

            cmds.extend(map(lambda fn : 'if [ -e "%s" ]; then rm "%s"; fi' % (fn, fn), addegde_pathfns_orig + addegde_pathfns_ext))
            cmds.append(';'.join(['cd %s' % refdir] +
                                 [_convert_vol_space_to_orig_cmd('%s/%s%s' % (refdir, n, ext))
                                            for n in ae_ns] +
                                 ['\@AddEdge %s+orig %s+orig' % ae_ns]))

            set_space_fns = addegde_pathfns_orig + ['%s/%s%s.%s' % (refdir, fn, orig_ext, exts[0]) for fn in ae_ns]

            for fn in set_space_fns: #['%s/%s' % (refdir, fn % orig_ext) for fn in addedge_fns_pat]:
                if fn.endswith('.log'):
                    continue
                cmds.append('if [ -e %s ]; then %s; fi' % (fn, _set_vol_space_cmd(fn, config)))

            utils.run_cmds(cmds, env)
            cmds = []

        else:
            print "AddEdge seems to have been run already"

        sid = config['sid']
        plot_slice_fns = [(ae_e_n + '_e3', ae_s_n + '_e3', '%s_qa_e3.png' % sid),
                          (None, ae_e_n + '_' + ae_s_n + '_ec', '%s_qa_ec.png' % sid)]

        # check for the output images (the last element of each tuple)
        plot_slice_imgfns = ['%s/%s' % (refdir, fn[-1]) for fn in plot_slice_fns]
        if overwrite or not all(map(os.path.exists, plot_slice_imgfns)):
            slice_dims = [0, 1, 2]
            slice_pos = [.35, .45, .55, .65]
            for fns in plot_slice_fns:
                input_fns = []
                for i, fn in enumerate(fns):
                    if fn is not None:
                        fn = '%s/%s' % (refdir, fn)
                        if i <= 1:
                            fn += ext
                    input_fns.append(fn)

                fn1, fn2, fnout = input_fns
                if not os.path.exists(fnout):
                    _make_slice_plot(fn1, fn2, fnout)
                    print "QA Image saved to %s" % fnout
                else:
                    print "Already exists: %s" % fnout
        else:
            print "QA images already exist"



    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = ('cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
             (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
    [icolds, hemis] = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)


    origext = '.asc'
    ext = format2extension(config)
    tp = format2type(config)
    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*%s' % (config['mi_icopat'] % icold, hemi, origext)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    if not sumafile.endswith(origext):
                        raise ValueError("%s does not end with %s" % (sumafile, origext))
                    #s = sumafile.split(".")
                    #s[len(s) - 2] += config['alsuffix'] # insert '_al' just before last dot
                    #alsumafile = ".".join(s)
                    extsumafile = sumafile[:-len(origext)]
                    alsumafile = extsumafile + config['alsuffix'] + ext

                    if config['overwrite'] or not os.path.exists('%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_%s ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, tp, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    # (compare against the literal input file name; 'pat' contains wildcards
                    #  and the input file ends in origext, not the output extension)
                    if sumafile == '%s%sh.sphere.reg%s' % (config['mi_icopat'] % icold, hemi, origext):
                        sumaout = '%s/%s' % (refdir, extsumafile + ext)
                        if config['overwrite'] or not os.path.exists(sumaout):
                            s = surf.read('%s/%s' % (sumadir, sumafile))
                            surf.write(s, sumaout)
                            #cmds.append('cp %s/%s %s/%s' % (sumadir, sumafile, refdir, sumafile))


        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = pathjoin(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = pathjoin(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
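
# The cat_matvec call near the end of run_alignment() composes F * M * F, where M is
# the 3x4 alignment matrix and F flips the signs of x and y (the difference between
# FreeSurfer's LPI and AFNI's RAI axis conventions). The numpy sketch below shows the
# same composition purely for illustration; the pipeline itself does this with cat_matvec.
import numpy as np

def lpi_to_rai_sketch(M):
    '''M: 3x4 affine [R | t]; return the x/y sign-flipped equivalent F*[R|t]*F'''
    F = np.diag([-1., -1., 1.])  # MATRIX(-1,0,0,0, 0,-1,0,0, 0,0,1,0), no translation
    R, t = M[:, :3], M[:, 3]
    return np.column_stack((F.dot(R).dot(F), F.dot(t)))
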
Example #21
0
def null_clustersize(config):
    # helper function - called by critical_clustersize
    # computes maximum cluster size of a single null permutation
    output_dir = config['output_dir']
    tthr = config['tthr']
    fwhm = config['fwhm']
    buck_fn_1D = config['buck_fn_1D']
    msk = _mask_expr(config)
    is_surf = _is_surf(config)
    if is_surf:
        surf_fn = config['surface_file']
    ext, ext1D = _ext(config), _ext(config, for1D=True)

    ns = len(config['data_files'])
    cmds = ['cd "%s"' % output_dir]

    # generate N random data files (N=number of participants)
    # use the output bucket to get datasets with the right size
    null_fns = []
    for i in range(ns):
        fn = _fn(config, 'rand_%d' % i, ext1D)
        if is_surf:
            cmds.append("; 1deval -ok_1D_text -a %s'[0]' -expr 'gran(0,1)' > '%s'" % (buck_fn_1D, fn))
        else:
            cmds.append("; 3dcalc -overwrite -prefix %s -a %s'[0]' -expr 'gran(0,1)'" % (fn, buck_fn_1D))
        null_fns.append(fn)

    # bucket random data
    buck_fn = _fn(config, 'rand_buck', ext1D)
    null_fns_list = ' '.join(null_fns)
    if is_surf:
        cmds.append('; 1dcat %s > "%s"' % (null_fns_list, buck_fn))
    else:
        cmds.append('; 3dbucket -overwrite -prefix %s %s' % (buck_fn, null_fns_list))

    # smooth all data at once, using estimated FWHM
    smooth_fn = _fn(config, 'rand_buck_smooth', ext1D)
    if is_surf:
        if config['sigma'] > 0:
            sigma_str = '-sigma %s' % config['sigma']
        else:
            sigma_str = ''
        cmds.append('; SurfSmooth -overwrite %s -met HEAT_07 -i_fs %s -input %s '
                    ' -fwhm %f -output %s %s' % (msk, surf_fn, buck_fn, fwhm, smooth_fn, sigma_str))
    else:
        cmds.append('; 3dBlurInMask -overwrite %s -FWHM %f -prefix %s -input %s' %
                      (msk, fwhm, smooth_fn, buck_fn))

    # run ttest
    if is_surf:
        msk = '' # cannot use mask on surface, but that's fine
               # as it was used in SurfSmooth
    ttest_fn = _fn(config, 'rand_buck_smooth_t', ext1D)
    cmds.append('; 3dttest++ %s -overwrite -prefix %s -setA %s' %
                    (msk, ttest_fn, smooth_fn))


    # extract maximum cluster size (in mm^2 or number of voxels) from output
    # and pipe into size_fn

    size_fn = _fn(config, 'rand_size', '.1D')
    if is_surf:
        postfix = "| grep --invert-match '#' | head -1 | cut -c 18-28"
        cmds.append('; SurfClust -i_fs %s -input %s 1 -rmm -1 '
                    ' -thresh %f -thresh_col 1 %s > "%s"' %
                        (surf_fn, ttest_fn, tthr, postfix, size_fn))
    else:
        postfix = " | grep --invert-match '#' | head -1 | cut -c1-8"
        cmds.append("; 3dclust -quiet -1noneg -1clip %f 0 0 %s'[1]' %s > '%s'" %
                     (tthr, ttest_fn, postfix, size_fn))

    utils.run_cmds(''.join(cmds))

    # read maximum cluster size from size_fn
    sz_str = None
    with open(pathjoin(output_dir, size_fn)) as f:
        sz_str = f.read()

    try:
        sz = float(sz_str)
    except (TypeError, ValueError):
        sz = 0. # CHECKME whether this makes sense

    print("Null data: maximum size %f" % sz)

    if is_surf:
        smoothing_fn_rec = pathjoin(output_dir, _fn(config, 'rand_buck_smooth', '.1D.dset.1D.smrec'))
        if not os.path.exists(smoothing_fn_rec):
            raise ValueError("Smoothing did not succeed. Please check the error"
                             " messages. You may have to set sigma manually")
        with open(smoothing_fn_rec) as f:
            s = f.read()

        final_fwhm = float(s.split()[-2])
        ratio = fwhm / final_fwhm
        thr = 0.9
        if ratio < thr or 1. / ratio < thr:
            raise ValueError('FWHM converged to %s but expected %s. Consider '
                             'setting sigma manually' % (final_fwhm, fwhm))

    # clean up - remove all temporary files

    tmpfns = null_fns + [buck_fn, smooth_fn, ttest_fn, size_fn]
    _remove_files(config, tmpfns)

    return sz
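
# null_clustersize() above is documented as a helper called by critical_clustersize().
# A hedged sketch of such a caller is given below: it repeats the null simulation and
# takes a high percentile of the maximum cluster sizes as the critical threshold. The
# function name, iteration count and percentile handling are assumptions, not the
# actual implementation; compute_fwhm() must have filled in config['fwhm'] and
# config['buck_fn_1D'] beforehand.
def critical_clustersize_sketch(config, niter=1000, alpha=0.05):
    '''return the (1-alpha) percentile of maximum null cluster sizes'''
    null_sizes = sorted(null_clustersize(config) for _ in range(niter))
    index = min(int((1. - alpha) * niter), niter - 1)
    return null_sizes[index]
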
Example #22
0
def run_makesurfmasks(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']

    if config['sid'] is None:
        raise ValueError('Need sid')

    sumfn = '%s_qa_surf_mask' % config['sid'] # output file

    fullext = config['outvol_fullext']
    volor = config['outvol_ext']

    sumfn_path = '%s/%s%s' % (refdir, sumfn, fullext)
    qafn_path = '%s/%s.png' % (refdir, sumfn)
    checkfn_paths = (sumfn_path, qafn_path)
    if all(map(os.path.exists, checkfn_paths)) and not overwrite:
        print "Already exist: %s" % (", ".join(checkfn_paths))
        return

    icolds, hemis = _get_hemis_icolds(config)

    volexts = ['%s%s' % (volor, e) for e in ('.HEAD', '.BRIK*')]




    sssuffix = config['sssuffix'] if config['expvol_ss'] else ''
    expvol_fn = '%s%s%s' % (utils.afni_fileparts(config['expvol'])[1],
                            sssuffix,
                            volor)


    #if overwrite or not os.path.exists('%s/%s' % (refdir, sv_al_nii_fn)):
    #    cmd = 'cd %s; 3dcopy -overwrite %s %s' % (refdir, sv_al_orig_fn, sv_al_nii_fn)
    #    utils.run_cmds(cmd, env)


    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    icold = max(icolds)

    oneDfn = '__t.1D'
    oneDtfn = '__tt.1D' # transposed
    cmds = ['cd %s' % refdir,
             '1deval -1D: -num %d -expr 1 > %s' % (icold ** 2 * 10 + 1, oneDfn),
             '1dtranspose %s > %s' % (oneDfn, oneDtfn)]

    utils.run_cmds(';'.join(cmds), env)


    tmpfns = [oneDfn, oneDtfn]

    s2v_cmd = ('3dSurf2Vol -map_func mask2 -data_expr "a*%%d" -spec %%s %%s -sv %s'
             ' -grid_parent %s. -prefix %%s -sdata %s -overwrite') % \
                                (expvol_fn, expvol_fn, oneDtfn)

    infix2val = {'-surf_A pial':1,
               '-surf_A smoothwm':2,
               '-surf_A smoothwm -surf_B pial -f_steps 20': 4}

    volfns = []
    for hemi in hemis:
        specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])

        for infix, val in infix2val.iteritems():
            fnprefix = '__m%d_%sh' % (val, hemi)
            cmd = s2v_cmd % (val, specfn, infix, fnprefix)
            utils.run_cmds('cd %s;%s' % (refdir, cmd))
            tmpfns.extend(['%s%s' % (fnprefix, e) for e in volexts])
            volfns.append(fnprefix + volor)

    cmds = ['cd %s' % refdir]
    catfn = '__cat'
    cmds.extend(['3dTcat -overwrite -prefix %s %s' % (catfn, ' '.join(volfns)),
                 '3dTstat -overwrite -sum -prefix %s %s%s' % (sumfn, catfn, volor)])
    tmpfns.extend(['%s%s' % (catfn, e) for e in volexts])


    cmds.extend('rm %s' % fn for fn in tmpfns)
    cmds.append('echo "Surface mask in %s"' % sumfn)

    utils.run_cmds(';'.join(cmds), env)

    # make plot
    if overwrite or not os.path.exists(qafn_path):
        expvol_path = '%s/%s' % (refdir, expvol_fn)
        _make_slice_plot(expvol_path,
                         sumfn_path,
                         qafn_path)
Example #23
0
def compute_fwhm(config):
    # helper function - called by critical_clustersize
    # computes FWHM of residuals of input data and stores in config
    output_dir = config['output_dir']

    is_surf = _is_surf(config)
    ext, ext1D = _ext(config), _ext(config, for1D=True)
    if not os.path.exists(output_dir):
        os.mkdir(output_dir)


    cmds = ['cd "%s"' % output_dir]

    # if surfaces and needs padding, do that first
    pad_to_node = config['pad_to_node']
    if is_surf and pad_to_node:
        data_files = []

        for i, fn in enumerate(config['data_files']):
            fn_pad = 'pad_%d%s' % (i, ext)
            cmds.append("; ConvertDset -overwrite -pad_to_node %d -input %s'[%d]' -prefix ./%s" %
                                (pad_to_node, fn, config['brik_index'], fn_pad))
            data_files.append(fn_pad)
        pad_files = data_files
        brik_index = 0
    else:
        data_files = config['data_files']
        pad_files = []
        brik_index = config['brik_index']

    # bucket data from all participants into a single file
    buck_fn = _fn(config, 'buck')

    cmds.append('; 3dbucket -overwrite -prefix %s' % buck_fn)
    for fn in data_files:
        cmds.append(" %s'[%d]'" % (fn, brik_index))

    # also store as 1D (won't hurt)
    if is_surf:
        buck_fn_1D = _fn(config, 'buck', ext1D)
        cmds.append('; ConvertDset -overwrite -o_1D -prefix %s -input %s' %
                    (buck_fn_1D, buck_fn))
    else:
        buck_fn_1D = buck_fn

    # compute group mean
    mean_fn = _fn(config, 'mean')
    cmds.append('; 3dTstat -overwrite -prefix %s %s' % (mean_fn, buck_fn))

    # compute residuals, and estimate FWHM for each of them
    # store FWHM output in fwhm_fn
    fwhm_fn = pathjoin(output_dir, _fn(config, 'fwhm', '.1D'))
    cmds.append('; echo > "%s"' % fwhm_fn)

    resid_fns = []
    for i in range(len(config['data_files'])):
        fn = _fn(config, 'resid_%d' % i)
        cmds.append("; 3dcalc -overwrite -prefix %s -a %s -b %s'[%d]' -expr 'a-b'"
                    % (fn, mean_fn, buck_fn, i))
        msk = _mask_expr(config)
        if is_surf:
            surf_fn = config['surface_file']
            cmds.append("; SurfFWHM %s -input %s -i_fs %s"
                        "| grep ^FWHM  | cut -f2 -d'=' >> '%s'" %
                        (msk, fn, surf_fn, fwhm_fn))
        else:
            cmds.append('; 3dFWHMx %s %s | cut -c18- >> %s' % (msk, fn, fwhm_fn))
        resid_fns.append(fn)

    cmd = ''.join(cmds)
    utils.run_cmds(cmd)

    # read FWHM values and store in config
    with open(fwhm_fn) as f:
        fwhms = f.read().split()

    print(fwhms)
    print(fwhm_fn)

    config['all_fwhms'] = fwhms # all FWHMs (for each participant)
    config['fwhm'] = sum(map(float, fwhms)) / len(fwhms) # average FWHM
    config['buck_fn'] = buck_fn
    config['buck_fn_1D'] = buck_fn_1D

    mean_fwhm_fn = pathjoin(output_dir, _fn(config, 'mean_fwhm', '.1D'))
    with open(mean_fwhm_fn, 'w') as f:
        f.write('%.3f\n' % config['fwhm'])

    tmpfns = resid_fns + pad_files + [mean_fn]
    print("Removing temporary files:")
    print(tmpfns)
    _remove_files(config, tmpfns)
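
# compute_fwhm() and null_clustersize() rely on small helpers (_fn, _ext, _mask_expr,
# _is_surf, _remove_files) that are not included in these examples. As one example of
# the naming convention, _fn() appears to build output file names from an infix and an
# optional extension; the sketch below is an assumption about that convention, not the
# real helper ('prefix' and 'ext' are hypothetical config keys).
def _fn_sketch(config, infix, ext=None):
    '''e.g. _fn_sketch({'prefix': 'grp', 'ext': '+orig'}, 'buck') -> 'grp_buck+orig' '''
    if ext is None:
        ext = config.get('ext', '')
    return '%s_%s%s' % (config.get('prefix', 'out'), infix, ext)
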
Example #24
0
def null_clustersize(config):
    # helper function - called by critical_clustersize
    # computes maximum cluster size of a single null permutation
    output_dir = config['output_dir']
    tthr = config['tthr']
    fwhm = config['fwhm']
    buck_fn_1D = config['buck_fn_1D']
    msk = _mask_expr(config)
    is_surf = _is_surf(config)
    if is_surf:
        surf_fn = config['surface_file']
    ext, ext1D = _ext(config), _ext(config, for1D=True)

    ns = len(config['data_files'])
    cmds = ['cd "%s"' % output_dir]

    # generate N random data files (N=number of participants)
    # use the output bucket to get datasets with the right size
    null_fns = []
    for i in xrange(ns):
        fn = _fn(config, 'rand_%d' % i, ext1D)
        if is_surf:
            cmds.append("; 1deval -ok_1D_text -a %s'[0]' -expr 'gran(0,1)' > '%s'" % (buck_fn_1D, fn))
        else:
            cmds.append("; 3dcalc -overwrite -prefix %s -a %s'[0]' -expr 'gran(0,1)'" % (fn, buck_fn_1D))
        null_fns.append(fn)

    # bucket random data
    buck_fn = _fn(config, 'rand_buck', ext1D)
    null_fns_list = ' '.join(null_fns)
    if is_surf:
        cmds.append('; 1dcat %s > "%s"' % (null_fns_list, buck_fn))
    else:
        cmds.append('; 3dbucket -overwrite -prefix %s %s' % (buck_fn, null_fns_list))

    # smooth all data at once, using estimated FWHM
    smooth_fn = _fn(config, 'rand_buck_smooth', ext1D)
    if is_surf:
        if config['sigma'] > 0:
            sigma_str = '-sigma %s' % config['sigma']
        else:
            sigma_str = ''
        cmds.append('; SurfSmooth -overwrite %s -met HEAT_07 -i_fs %s -input %s '
                    ' -fwhm %f -output %s %s' % (msk, surf_fn, buck_fn, fwhm, smooth_fn, sigma_str))
    else:
        cmds.append('; 3dBlurInMask -overwrite %s -FWHM %f -prefix %s -input %s' %
                      (msk, fwhm, smooth_fn, buck_fn))

    # run ttest
    if is_surf:
        msk = '' # cannot use mask on surface, but that's fine
               # as it was used in SurfSmooth
    ttest_fn = _fn(config, 'rand_buck_smooth_t', ext1D)
    cmds.append('; 3dttest++ %s -overwrite -prefix %s -setA %s' %
                    (msk, ttest_fn, smooth_fn))


    # extract maximum cluster size (in mm^2 or number of voxels) from output
    # and pipe into size_fn

    size_fn = _fn(config, 'rand_size', '.1D')
    if is_surf:
        postfix = "| grep --invert-match '#' | head -1 | cut -c 18-28"
        cmds.append('; SurfClust -i_fs %s -input %s 1 -rmm -1 '
                    ' -thresh %f -thresh_col 1 %s > "%s"' %
                        (surf_fn, ttest_fn, tthr, postfix, size_fn))
    else:
        postfix = " | grep --invert-match '#' | head -1 | cut -c1-8"
        cmds.append("; 3dclust -quiet -1noneg -1clip %f 0 0 %s'[1]' %s > '%s'" %
                     (tthr, ttest_fn, postfix, size_fn))

    utils.run_cmds(''.join(cmds))

    # read maximum cluster size from size_fn
    sz_str = None
    with open(pathjoin(output_dir, size_fn)) as f:
        sz_str = f.read()

    try:
        sz = float(sz_str)
    except (TypeError, ValueError):
        sz = 0. # CHECKME whether this makes sense

    print "Null data: maximum size %f" % sz

    if is_surf:
        smoothing_fn_rec = pathjoin(output_dir, _fn(config, 'rand_buck_smooth', '.1D.dset.1D.smrec'))
        if not os.path.exists(smoothing_fn_rec):
            raise ValueError("Smoothing did not succeed. Please check the error"
                             " messages. You may have to set sigma manually")
        with open(smoothing_fn_rec) as f:
            s = f.read()

        final_fwhm = float(s.split()[-2])
        ratio = fwhm / final_fwhm
        thr = 0.9
        if ratio < thr or 1. / ratio < thr:
            raise ValueError('FWHM converged to %s but expected %s. Consider '
                             'setting sigma manually' % (final_fwhm, fwhm))

    # clean up - remove all temporary files

    tmpfns = null_fns + [buck_fn, smooth_fn, ttest_fn, size_fn]
    _remove_files(config, tmpfns)

    return sz