def _generate_DROI_tables_call(tup):
     import os, neuropythy as ny
     # set our niceness!
     os.nice(10)
     flnm = os.path.join(tup[3], '%s.csv' % (tup[0], ))
     if os.path.isfile(flnm): return flnm
     f = VisualPerformanceFieldsDataset._generate_subject_DROI_table
     subjects = ny.data['visual_performance_fields'].subjects
     tbl = f(subjects,
             tup[0],
             angles=tup[1],
             eccens=tup[2],
             min_variance_explained=tup[4],
             method=tup[5])
     ny.save(flnm, tbl)
     return flnm
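
A minimal driver sketch for the worker above, assuming a hypothetical output directory and parameter values; only the tuple layout (name, angles, eccens, output directory, min variance explained, method) is read off the indexing inside _generate_DROI_tables_call.

import multiprocessing as mp

# Hypothetical angle/eccentricity bin specs, output directory, and method;
# only the tuple ordering is taken from the function above.
angles = [10, 20, 30, 40]
eccens = [1, 2, 4, 7]
jobs = [(name, angles, eccens, '/tmp/droi_tables', 0.1, 'mean')
        for name in ('ventral', 'dorsal')]
with mp.Pool(2) as pool:
    # each worker writes '<name>.csv' into the output directory (skipping any
    # file that already exists) and returns the filename
    filenames = pool.map(_generate_DROI_tables_call, jobs)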
Example #2
        if not os.path.isabs(maskfile): maskfile = os.path.join(tdir, maskfile)
        if not os.path.isfile(maskfile):
            warn('Could not find mask file (%s) in test dir %s!' %
                 (maskfile, tdir))
        else:
            mask = ny.load(maskfile, to='image')
    # Okay, now convert into something more standard:
    params['frame_rate'] = frate
    params['pixels_per_degree'] = d2p
    params['TR_length'] = trlen
    stim = np.asarray(stim.dataobj)
    params['stimulus'] = np.reshape(stim, [stim.shape[0], stim.shape[1], -1])
    params['data'] = np.asarray(data.dataobj)
    if mask is None:
        params['mask'] = np.ones(params['data'].shape[:-1], dtype='bool')
    else:
        params['mask'] = np.asarray(mask.dataobj)
    # Okay, now we can call the solver:
    note('   Solving...')
    try:
        results = solver(params)
        # Now, write out each of the results
        for (k, v) in six.iteritems(results):
            flnm = os.path.join(tdir, 'out_' + k + '.nii.gz')
            ny.save(flnm, v, 'nifti')
    except Exception:
        warn('Exception raised while solving test dir: %s' % (tdir, ))
        raise

sys.exit(0)
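
The same load/save pattern in isolation: a small sketch (hypothetical file names) that round-trips a volume through neuropythy using the ny.load(..., to='image') and ny.save(..., 'nifti') calls seen above.

import numpy as np, nibabel as nib, neuropythy as ny

img = ny.load('mask.nii.gz', to='image')        # hypothetical input file
dat = np.asarray(img.dataobj) > 0               # binarize the voxel data
out = nib.Nifti1Image(dat.astype('uint8'), img.affine)
ny.save('out_mask.nii.gz', out, 'nifti')        # hypothetical output file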
Example #3
n = nl + nr
pial = np.vstack([lpial.coordinates.T, rpial.coordinates.T])
try:
    sh = space.cKDTree(pial)
except Exception:
    sh = space.KDTree(pial)

(d, ii) = sh.query(coords)
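# query indices below nl belong to left-hemisphere pial vertices (stacked first above)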
lpts = (ii < nl)
rpts = ~lpts
(lpts, rpts) = [np.where(pts)[0] for pts in (lpts, rpts)]

fsa = ny.freesurfer_subject('fsaverage')
res = np.full(coords.shape, np.nan)
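# for each hemisphere: take the matched subject vertices, address them on the fsaverage
# registration mesh, and read off the corresponding points on fsaverage's pial surface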
for (h, pts) in zip(['lh', 'rh'], [lpts, rpts]):
    if len(pts) == 0: continue
    idcs = ii[pts]
    if h == 'rh': idcs = idcs - nl
    fshem = fsa.hemis[h]
    sbhem = sub.hemis[h]
    fsmsh = fshem.registrations['fsaverage']
    addrs = fsmsh.address(sbhem.registrations['fsaverage'].coordinates[:,
                                                                       idcs].T)
    res[pts, :] = fshem.pial_surface.unaddress(addrs).T

df = ny.to_dataframe({k: v for (k, v) in zip(['x', 'y', 'z'], res.T)})
ny.save(sys.argv[3], df)

note('Success!')
sys.exit(0)
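
The final export step in isolation, using hypothetical coordinates; as in the examples above, no explicit format is passed to ny.save, which works it out from the filename.

import numpy as np, neuropythy as ny

coords = np.random.randn(100, 3)                 # hypothetical (N x 3) points
df = ny.to_dataframe({k: v for (k, v) in zip(['x', 'y', 'z'], coords.T)})
ny.save('coords.csv', df)                        # format deduced from the .csv extension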
Example #4
def main(args):
    # Parse the arguments...
    parser = argparse.ArgumentParser()
    parser.add_argument('sub', metavar='subject', nargs=1,
                        help=('The FreeSurfer subject ID or directory.'))
    parser.add_argument('outdir', metavar='directory', type=str, nargs=1,
                        help='The directory containing the output nifti files.')
    parser.add_argument('-x', '--no-surf', required=False, default=True,
                        dest='surface', action='store_false',
                        help=('If provided, instructs the script not to produce files of the '
                              'pRF parameters resampled onto the cortical surface.'))
    parser.add_argument('-m', '--method', required=False, default='linear', dest='method',
                        help=('The method to use for volume-to-surface interpolation; this may'
                              ' be nearest or linear; the default is linear.'))
    parser.add_argument('-l', '--layer', required=False, default='midgray', dest='layer',
                        help=('Specifies the cortical layer to use in interpolation from volume'
                              ' to surface. By default, uses midgray. May be set to a value'
                              ' between 0 (white) and 1 (pial) to specify an intermediate surface'
                              ' or may be simply white, pial, or midgray.'))
    parser.add_argument('-d', '--subjects-dir', required=False, default=None, dest='sdir',
                        help=('Specifies the subjects directory to use; by default uses the'
                              ' environment variable SUBJECTS_DIR.'))
    parser.add_argument('-y', '--no-invert-y', required=False, default=True, dest='invert_y',
                        action='store_false',
                        help=('If provided, does not invert the y-coordinate exported from'
                              ' VistaSoft; by default this inversion is performed.'))
    parser.add_argument('-v', '--verbose', required=False, default=False, action='store_true',
                        dest='verbose', help='Print verbose output')
    if args[0].startswith('python'): args = args[2:]
    else: args = args[1:]
    args = parser.parse_args(args)
    # Check some of the arguments...
    sub = ny.freesurfer_subject(args.sub[0])
    dosurf = args.surface
    outdir = args.outdir[0]
    if not os.path.isdir(outdir):
        raise RuntimeError('Directory %s does not exist' % outdir)
    else:
        os.chdir(outdir)
    if args.verbose:
        def note(*args):
            six.print_(*args, flush=True)
            return True
    else:
        def note(*args):
            return False
    try: args.layer = float(args.layer)
    except ValueError: pass
    # figure out what datasets are here...
    dsets = [fl[:-13] for fl in os.listdir('.') if fl.endswith('-xcrds.nii.gz')]
    # loop over the given EPIs
    for ds in dsets:
        note('Processing Dataset %s...' % ds)
        # import the files...
        note('  - Importing parameters...')
        (x,y,s,v) = [ny.load('%s-%s.nii.gz' % (ds, suff), to='image')
                     for suff in ['xcrds','ycrds','sigma','vexpl']]
        # fix polar angle/eccen
        note('  - Creating polar angle/eccentricity images...')
        xdat = np.asarray(x.dataobj)
        ydat = np.asarray(y.dataobj)
        ang = np.arctan2(-ydat if args.invert_y else ydat, xdat)
        ang = np.mod((90.0 - 180.0/np.pi * ang) + 180, 360) - 180
        a = nib.Nifti1Image(ang, x.affine, x.header)
        e = nib.Nifti1Image(np.sqrt(ydat**2 + xdat**2), x.affine, x.header)
        a.to_filename('%s-angle.nii.gz' % ds)
        e.to_filename('%s-eccen.nii.gz' % ds)
        # surfaces...
        if not dosurf: continue
        note('  - Projecting to surface...')
        to_output = {'xcrds':x, 'ycrds':y, 'sigma': s, 'vexpl': v}
        xy = ([0,0],[0,0])
        for (dname,im) in six.iteritems(to_output):
            (ldat, rdat) = sub.image_to_cortex(im, args.layer,
                                               method=args.method, dtype=np.float32,
                                               weights=v)
            if dname == 'xcrds':
                xy[0][0] = ldat
                xy[1][0] = rdat
            elif dname == 'ycrds':
                xy[0][1] = ldat
                xy[1][1] = rdat
            # save this property for each hemisphere
            for (d,h) in zip([ldat,rdat], ['lh','rh']):
                ny.save('%s.%s-%s.mgz' % (h, ds, dname), d)
        # Special handling for the angle and eccen (avoids need for circular averaging)
        xy = (np.asarray(xy[0]), np.asarray(xy[1]))
        (lecc,recc) = [np.sqrt(np.sum(u**2, axis=0)) for u in xy]
        (lang,rang) = [np.arctan2(-u[1] if args.invert_y else u[1], u[0])
                       for u in (xy[0] * ny.util.zinv(lecc), xy[1] * ny.util.zinv(recc))]
        (lang,rang) = [np.mod((90 - 180/np.pi * aa) + 180, 360) - 180 for aa in (lang,rang)]
        # export these...
        for (h,nm,dat) in [('lh','eccen',lecc), ('lh','angle',lang),
                           ('rh','eccen',recc), ('rh','angle',rang)]:
            ny.save('%s.%s-%s.mgz' % (h, ds, nm), dat)
    # That's it!
    return 0
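
A plausible entry point for the script above (not shown in this snippet); main expects the full argv, program name included, because it strips the leading elements itself.

if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))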
Example #5
            (anat,sid,h,meta) = msg.split(':')[0].split(' path ')[-1].split('/')
            sid = int(sid)
            tag = 'mesh'
        elif 'Incomplete set of lines for' in msg:
            (anat, sid, h) = msg.split(' lines for ')[-1].split(' / ')
            sid = int(sid)
            tag = 'anatomist'
        else: raise ValueError('Unrecognized warning in file %s: %s' % (flnm,ln))
        if anat != 'mean': errs.append((anat,sid,h))
        dat = errnotes[sid]
        if anat not in dat[h]: dat[h][anat] = []
        lst = dat[h][anat]
        entry = {'tag': tag, 'message': msg}
        if meta is not None: entry['metadata'] = meta
        lst.append(entry)
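        # when bh is set, record the same entry under the opposite hemisphere as well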
        if bh:
            h = ('lh' if h == 'rh' else 'rh')
            if anat not in dat[h]: dat[h][anat] = []
            lst = dat[h][anat]
            lst.append(entry)
errs = list(map(list, sorted(set(errs))))
# save the json exclusions file
ny.save(os.path.join(log_path, 'exclusions.json'), errs, 'JSON')
# and the json errors file
errnotes = {str(k):v for (k,v) in six.iteritems(errnotes)}
ny.save(os.path.join(log_path, 'errors.json'), errnotes, 'JSON')

sys.exit(0)
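
The JSON export on its own: a minimal sketch with made-up records using the same ny.save(..., 'JSON') call as above.

import neuropythy as ny

# hypothetical (anatomist, subject ID, hemisphere) exclusion triples
exclusions = [['rater1', 101, 'lh'], ['rater2', 203, 'rh']]
ny.save('/tmp/exclusions.json', exclusions, 'JSON')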