Example #1
def to_image_spec(img, **kw):
    '''
    to_image_spec(img) yields a dictionary of meta-data for the given nibabel image object img.
    to_image_spec(hdr) yields the equivalent meta-data for the given nibabel image header.

    Note that img may also be a mapping object, in which case it is returned verbatim.
    '''
    if pimms.is_vector(img, 'int') and is_tuple(img) and len(img) < 5:
        r = image_array_to_spec(np.zeros(img))
    elif pimms.is_map(img):
        r = img
    elif is_image_header(img):
        r = image_header_to_spec(img)
    elif is_image(img):
        r = image_to_spec(img)
    elif is_image_array(img):
        r = image_array_to_spec(img)
    else:
        raise ValueError('cannot convert object of type %s to image-spec' %
                         type(img))
    if len(kw) > 0: r = {k: v for m in (r, kw) for (k, v) in six.iteritems(m)}
    # normalize the entries
    for (k, aliases) in six.iteritems(imspec_aliases):
        if k in r: continue
        for al in aliases:
            if al in r:
                val = r[al]
                r = pimms.assoc(pimms.dissoc(r, al), k, val)
                break
    return r
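A quick usage sketch (the spec keys shown are illustrative; this assumes the neuropythy image helpers used above are importable):

import numpy as np
# from a bare shape tuple: builds the spec of an empty image with that shape
spec = to_image_spec((64, 64, 32))
# from a mapping: returned as-is, with keyword options merged on top of it
spec = to_image_spec({'image_shape': (64, 64, 32)}, affine=np.eye(4))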
Example #2
def to_credentials(arg):
    '''
    to_credentials(arg) converts arg into a pair (key, secret) if arg can be coerced into such a
      pair and otherwise raises an error.
    
    Possible inputs include:
      * A tuple (key, secret)
      * A mapping with the keys 'key' and 'secret'
      * The name of a file that can load credentials via the load_credentials() function
      * A string that separates the key and secret by ':', e.g., 'mykey:mysecret'
      * A string that separates the key and secret by a "\n", e.g., "mykey\nmysecret"
    '''
    if pimms.is_str(arg):
        try:
            return load_credentials(arg)
        except Exception:
            pass
        try:
            return str_to_credentials(arg)
        except Exception:
            raise ValueError(
                'String "%s" is neither a file containing credentials nor a valid'
                ' credentials string itself.' % arg)
    elif pimms.is_map(arg) and 'key' in arg and 'secret' in arg:
        return (arg['key'], arg['secret'])
    elif pimms.is_vector(arg, str) and len(arg) == 2:
        return tuple(arg)
    else:
        raise ValueError(
            'given argument cannot be coerced to credentials: %s' % arg)
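For reference, a few calls exercising each accepted input form (the key/secret values are placeholders):

to_credentials(('mykey', 'mysecret'))                   # tuple -> ('mykey', 'mysecret')
to_credentials({'key': 'mykey', 'secret': 'mysecret'})  # mapping -> ('mykey', 'mysecret')
to_credentials('mykey:mysecret')                        # ':'-separated string
to_credentials('mykey\nmysecret')                       # newline-separated string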
Example #3
def finto(x, ii, n, null=0):
    '''
    finto(x,ii,n) yields a vector u of length n such that u[ii] = x.

    Notes:
      * The ii index may be a tuple (as can be passed to numpy arrays' getitem method) in order to
        specify that the specific elements of a multidimensional output be set. In this case, the
        argument n should be a tuple of sizes (a single integer is taken to be a square/cube/etc).
      * x may be a sparse array, but if so it will be reified by this function.

    The following optional arguments are allowed:
      * null (default: 0) specifies the value that should appear in the elements of u that are not
        set.
    '''
    x  = x.toarray() if sps.issparse(x) else np.asarray(x)
    shx = x.shape
    if isinstance(ii, tuple):
        if not pimms.is_vector(n): n = tuple([n for _ in ii])
        if len(n) != len(ii): raise ValueError('%d-dim index but %d-dim output' % (len(ii),len(n)))
        sh = n + shx[1:]
    elif pimms.is_int(ii): sh = (n,) + shx
    else:                  sh = (n,) + shx[1:]
    u = np.zeros(sh, dtype=x.dtype) if null == 0 else np.full(sh, null, dtype=x.dtype)
    u[ii] = x
    return u
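A minimal runnable illustration of both index forms (assuming the definition above together with its numpy/scipy imports):

import numpy as np
u = finto([10, 20, 30], np.array([0, 2, 4]), 6)
# u == array([10,  0, 20,  0, 30,  0])
v = finto([1.5], (np.array([0]), np.array([1])), 3, null=np.nan)
# v is a 3x3 array of nan everywhere except v[0, 1] == 1.5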
Example #4
def face_vmag(hemi, retinotopy='any', to=None, **kw):
    '''
    face_vmag(mesh) yields the visual magnification based on the projection of individual faces on
      the cortical surface into the visual field.
    face_vmag(mdat) uses the given magnification data mdat (as returned from mag_data()); if valid
      magnification data is passed then all options related to the mag_data() function are ignored.

    All options accepted by mag_data() are accepted by face_vmag().

    The additional optional arguments are also accepted:
      * to (default: None) specifies that the resulting data should be transformed in some way;
        these transformations are:
          * None or 'data': returns the full magnification data without transformation;
          * 'faces': returns a property of the visual magnification value of each face;
          * 'vertices': returns a property of the visual magnification value of each vertex, as
            determined by averaging the magnification of the faces adjacent to each vertex.
    '''
    mdat = mag_data(hemi, retinotopy=retinotopy, **kw)
    if pimms.is_vector(mdat): return tuple([face_vmag(m, to=to) for m in mdat])
    elif pimms.is_vector(mdat.keys(), 'int'):
        return pimms.lazy_map({k: curry(lambda k: face_vmag(mdat[k], to=to), k)
                               for k in six.iterkeys(mdat)})
    #TODO: implement the face_vmag calculation using mdat
    # convert to the appropriate type according to the to param
    raise NotImplementedError()
Example #5
 def angle_to_cortex(self, theta, rho):
     'See help(neuropythy.registration.RetinotopyModel.angle_to_cortex).'
     #TODO: This should be made to work correctly with visual area boundaries: this could be done
     # by, for each area (e.g., V2) looking at its boundaries (with V1 and V3) and flipping the
     # adjacent triangles so that there is complete coverage of each hemifield, guaranteed.
     if not pimms.is_vector(theta):
         return self.angle_to_cortex([theta], [rho])[0]
     theta = np.asarray(theta)
     rho = np.asarray(rho)
     zs = np.asarray(rho * np.exp(1j * ((90.0 - theta) / 180.0 * np.pi)), dtype=complex)
     coords = np.asarray([zs.real, zs.imag]).T
     if coords.shape[0] == 0:
         return np.zeros((0, len(self.visual_meshes), 2))
     # we step through each area in the forward model and return the appropriate values
     tx = self.transform
     res = np.transpose([
         msh.interpolate(
             coords, msh.prop('cortical_coordinates'), method='linear')
         for area in sorted(self.visual_meshes.keys())
         for msh in [self.visual_meshes[area]]
     ], (1, 0, 2))
     if tx is not None:
         res = np.asarray([
             np.dot(tx, np.vstack(
                 (area_xy.T, np.ones(len(area_xy)))))[0:2].T
             for area_xy in res
         ])
     return res
Example #6
def disk_vmag(hemi, retinotopy='any', to=None, **kw):
    '''
    disk_vmag(mesh) yields the visual magnification based on the projection of disks on the cortical
      surface into the visual field.

    All options accepted by mag_data() are accepted by disk_vmag().
    '''
    mdat = mag_data(hemi, retinotopy=retinotopy, **kw)
    if pimms.is_vector(mdat): return tuple([disk_vmag(m, to=to) for m in mdat])
    elif pimms.is_vector(mdat.keys(), 'int'):
        return pimms.lazy_map({
            k: curry(lambda k: disk_vmag(mdat[k], to=to), k)
            for k in six.iterkeys(mdat)
        })
    #TODO: implement the disk_vmag calculation using mdat
    # convert to the appropriate type according to the to param
    raise NotImplementedError()
Example #7
def calc_labels(subject, hemisphere_tags, hemisphere_data, labels=None):
    '''
    calc_labels finds the available label data for the subject on which the retinotopy operations
    are being performed.

    Afferent parameters:
      @ labels 
        The filenames of the files containing label data for the subject's hemispheres. Label data
        can be provided in mgz, annot, or curv files containing visual area labels, one per vertex.
        The labels argument may be specified as a comma-separated list of filenames (in the same
        order as the hemispheres, which are lh then rh by default) or as a single template filename
        that may contain the character * as a stand-in for the hemisphere tag. For example,
        '/data/*.v123_labels.mgz' would look for the file /data/lh.v123_labels.mgz for the 'lh'
        hemisphere and for /data/rh_LR32k.v123_labels.mgz for the 'rh_LR32k' hemisphere.
        Note that labels are not required--without labels, no field-sign minimization is performed,
        so retinotopic cleaning may be less reliable. Note that additionally, labels may be
        preceded by the relevant tag; so instead of '/data/*.v123_labels.mgz' with, as in the 
        example, hemispheres 'lh,rh_LR32k', one could use the arguments
        'lh:/data/lh.v123_labels.mgz,rh:/data/rh_LR32k.v123_labels.mgz' (for labels) and
        'lh,rh:rh_LR32k' for hemispheres.
    '''
    lbls = {}
    # no labels argument--this is fine; no labels are used
    if labels is None: return {'label_data': pyr.m()}
    if not pimms.is_str(labels):
        raise ValueError('could not understand non-string labels')
    # first, it might just be a template pattern
    fls = {}
    if '*' in labels:
        sparts = labels.split('*')
        for h in hemisphere_tags:
            flnm = h.join(sparts)
            fls[h] = os.path.expanduser(os.path.expandvars(flnm))
    else:
        # okay, separate commas...
        lsplit = labels.split(',')
        for (k, l) in enumerate(lsplit):
            if ':' in l: (tag, name) = l.split(':')
            elif k < len(hemisphere_tags):
                (tag, name) = (hemisphere_tags[k], l)
            else:
                raise ValueError('could not match labels to hemispheres')
            if tag not in hemisphere_data:
                raise ValueError('Tag %s (in labels arg) does not exist' %
                                 (tag, ))
            fls[tag] = os.path.expanduser(os.path.expandvars(name))
    for (tag, name) in six.iteritems(fls):
        if not os.path.isfile(name):
            raise ValueError('Labels filename %s not found' % (name, ))
        hem = hemisphere_data[tag]
        tmp = nyio.load(name)
        if not pimms.is_vector(tmp) or len(tmp) != hem.vertex_count:
            raise ValueError('Labels file %s does not contain label data' %
                             (name, ))
        lbls[tag] = np.asarray(tmp)
    return {'label_data': pimms.persist(lbls)}
Example #8
 def __getitem__(self, k):
     if pimms.is_int(k): return self.by_id.get(k, None)
     elif pimms.is_str(k): return self.by_name.get(k, None)
     elif pimms.is_vector(k, 'int'):
         return np.asarray([self.by_id.get(kk, None) for kk in k])
     else:
         return np.asarray([
             (self.by_name if pimms.is_str(kk) else self.by_id).get(kk, None)
             for kk in k
         ])
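The method supports scalar and vectorized lookups alike; for instance (a sketch, where lbl is an instance of the containing label-index class):

lbl[3]              # -> the entry with id 3, or None
lbl['V1']           # -> the entry named 'V1', or None
lbl[[1, 2, 3]]      # -> array of entries looked up by id
lbl[['V1', 2]]      # -> mixed name/id lookup, element by element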
Example #9
def label_index(dat, *args, **kw):
    '''
    label_index(idx_map) converts the given map- or dict-like object idx_map into a label index by
      assuming that the keys are label ids and the values are label names or tuples of label names
      and (r,g,b,a) colors.
    label_index(ids, names) uses the given ids and names to make the label index.
    label_index(ids, names, colors) additionally uses the given colors.

    Note that if there is not a label with id 0 then such a label is automatically created with the
    name 'none', the rgba color [0,0,0,0], and no entry meta-data. As a general rule, the label 0
    should be used to indicate that a label is missing.

    The optional arguments meta_data and entry_meta_data may specify both the meta-data for the
    label-index object itself as well as the meta-data for the individual entries.
    '''
    md = kw.pop('meta_data', {})
    mds = kw.pop('entry_meta_data', None)
    if len(kw) > 0: raise ValueError('unrecognized optional argument(s) given to label_index')
    if len(args) == 0:
        if pimms.is_map(dat):
            (ids,nms,clrs) = ([],[],[])
            for (k,v) in six.iteritems(dat):
                if pimms.is_scalar(v): c = None
                else: (v,c) = v
                if pimms.is_str(k):
                    ids.append(v)
                    nms.append(k)
                else:
                    ids.append(k)
                    nms.append(v)
                if c is not None: clrs.append(c)
        elif is_dataframe(dat):
            if dat.index.name.lower() == 'id': ids = dat.index.values
            else: ids = dat['id'].values
            nms = dat['name'].values
            if 'color' in dat: clrs = np.array(list(map(list, dat['color'].values)))
            elif all(k in dat for k in ['r','g','b']):
                ks = ['r','g','b']
                if 'a' in dat: ks.append('a')
                clrs = np.array([[r[k] for k in ks] for (ii,r) in dat.iterrows()])
            else: clrs = []
        elif pimms.is_vector(dat, 'int'):
            ids = np.unique(dat)
            nms = ['label%d'%k for k in ids]
            clrs = []
        else: raise ValueError('label_index(idx_map) given non-map argument')
    elif len(args) == 1: (ids,nms,clrs) = (dat, args[0], [])
    elif len(args) == 2: (ids,nms,clrs) = (dat, args[0], args[1])
    else: raise ValueError('Too many arguments given to label_index()')
    if clrs is None or len(clrs) == 0: clrs = None
    elif len(clrs) != len(ids): raise ValueError('color-count must match id-count')
    # okay, make the label index
    return LabelIndex(ids, nms, colors=clrs, meta_data=md, entry_meta_data=mds)
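Two sketches of typical invocations (the label names and colors are illustrative):

# from an id -> name mapping:
li = label_index({0: 'none', 1: 'V1', 2: 'V2'})
# from parallel id/name/color vectors:
li = label_index([1, 2], ['V1', 'V2'], [(1, 0, 0, 1), (0, 1, 0, 1)])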
Example #10
 def wout_meta(self, *args, **kwargs):
     '''
     obj.wout_meta(...) removes the given arguments (keys) from the object's current meta_data
     map and yields a new object with the new meta-data.
     '''
     md = self.meta_data
     for a in args:
         if pimms.is_vector(a):
             for u in a:
                 md = md.discard(u)
         else:
             md = md.discard(a)
     return self if md is self.meta_data else self.copy(meta_data=md)
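Usage sketch (obj being any neuropythy object that carries a persistent meta_data map):

obj2 = obj.wout_meta('source')             # drop a single key
obj2 = obj.wout_meta(['source', 'notes'])  # a vector argument drops several keys
# when none of the given keys are present, the original object is returned unchanged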
Example #11
 def _atlas_to_atlver(atl):
     atl0 = atl
     if not pimms.is_vector(atl):
         if ':' in atl:
             atl = atl.split(':')
             if len(atl) != 2: raise ValueError('Cannot parse atlas spec: %s' % atl0)
         else: atl = [atl, None]
     if len(atl) != 2: raise ValueError('Improperly specified atlas: %s' % atl0)
     if pimms.is_str(atl[1]):
         if len(atl[1]) == 0: atl = (atl[0], None)
         else:
             if atl[1][0] == 'v': atl[1] = atl[1][1:]
             try: atl = (atl[0], tuple([int(x) for x in re.split(r'[-_.]+', atl[1])]))
             except Exception:
                 raise ValueError('Could not parse atlas version string: %s' % atl[1])
     elif pimms.is_int(atl[1]):  atl = (atl[0], (atl[1],))
     elif pimms.is_real(atl[1]): atl = (atl[0], (int(atl[1]), int(10*(atl[1] - int(atl[1]))),))
     elif pimms.is_vector(atl[1], int): atl = (atl[0], tuple(atl[1]))
     elif atl[1] is not None:
         raise ValueError('atlas version must be a string (like "v1_5_1") or a list of ints')
     else: atl = tuple(atl)
     return atl + (atl0,)
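Example inputs and the (name, version, original) triples they produce:

_atlas_to_atlver('benson14:v2.5')  # -> ('benson14', (2, 5), 'benson14:v2.5')
_atlas_to_atlver('wang15')         # -> ('wang15', None, 'wang15')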
Example #12
def isoline_vmag(hemi, isolines=None, surface='midgray', min_length=2, **kw):
    '''
    isoline_vmag(hemi) calculates the visual magnification function f using the default set of
      iso-lines (as returned by neuropythy.vision.visual_isolines()). The hemi argument may
      alternately be a mesh object.
    isoline_vmag(hemi, isolns) uses the given iso-lines rather than the default ones.
    
    The return value of this function is a dictionary whose keys are 'tangential', 'radial', and
    'areal', and whose values are the estimated visual magnification functions. These functions
    are of the form f(x,y) where x and y can be numbers or arrays in the visual field.
    '''
    from neuropythy.util import (curry, zinv)
    from neuropythy.mri import is_cortex
    from neuropythy.vision import visual_isolines
    from neuropythy.geometry import to_mesh
    # if there's no isolines, get them
    if isolines is None: isolines = visual_isolines(hemi, **kw)
    # see if the isolines is a lazy map of visual areas; if so return a lazy map recursing...
    if pimms.is_vector(isolines.keys(), 'int'):
        f = lambda k: isoline_vmag(isolines[k], surface=surface, min_length=min_length)
        return pimms.lazy_map({k:curry(f, k) for k in six.iterkeys(isolines)})
    mesh = to_mesh((hemi, surface))
    # filter by min length
    if min_length is not None:
        isolines = {k: {kk: {kkk: [vvv[ii] for ii in iis] for (kkk,vvv) in six.iteritems(vv)}
                        for (kk,vv) in six.iteritems(v)
                        for iis in [[ii for (ii,u) in enumerate(vv['polar_angles'])
                                     if len(u) >= min_length]]
                        if len(iis) > 0}
                    for (k,v) in six.iteritems(isolines)}
    (rlns,tlns) = [isolines[k] for k in ['eccentricity', 'polar_angle']]
    if len(rlns) < 2: raise ValueError('fewer than 2 iso-eccentricity lines found')
    if len(tlns) < 2: raise ValueError('fewer than 2 iso-angle lines found')
    # grab the visual/surface lines
    ((rvlns,tvlns),(rslns,tslns)) = [[[u for lns in six.itervalues(xlns) for u in lns[k]]
                                      for xlns in (rlns,tlns)]
                                     for k in ('visual_coordinates','surface_coordinates')]
    # calculate some distances
    (rslen,tslen) = [[np.sqrt(np.sum((sx[:,:-1] - sx[:,1:])**2, 0)) for sx in slns]
                     for slns in (rslns,tslns)]
    (rvlen,tvlen) = [[np.sqrt(np.sum((vx[:,:-1] - vx[:,1:])**2, 0)) for vx in vlns]
                     for vlns in (rvlns,tvlns)]
    (rvxy, tvxy)  = [[0.5*(vx[:,:-1] + vx[:,1:]) for vx in vlns] for vlns in (rvlns,tvlns)]
    (rvlen,tvlen,rslen,tslen) = [np.concatenate(u) for u in (rvlen,tvlen,rslen,tslen)]
    (rvxy,tvxy)   = [np.hstack(vxy) for vxy in (rvxy,tvxy)]
    (rvmag,tvmag) = [vlen * zinv(slen) for (vlen,slen) in zip([rvlen,tvlen],[rslen,tslen])]
    return {k: {'visual_coordinates':vxy, 'visual_magnification': vmag,
                'visual_lengths': vlen, 'surface_lengths': slen}
            for (k,vxy,vmag,vlen,slen) in zip(['radial','tangential'], [rvxy,tvxy],
                                              [rvmag,tvmag], [rvlen,tvlen], [rslen,tslen])}
Example #13
 def cortex_to_angle(self, x, y):
     'See RetinotopyModel.cortex_to_angle.'
     if not pimms.is_vector(x): return self.cortex_to_angle([x], [y])[0]
     # start by applying the transform to the points
     tx = self.inverse_transform
     xy = np.asarray([x,y]).T if tx is None else np.dot(tx, [x,y,np.ones(len(x))])[0:2].T
     # we only need to interpolate from the inverse mesh in this case
     interp_ae = self.cortical_mesh.interpolate(xy, [self.polar_angles, self.eccentricities],
                                                method='linear')
     interp_id = self.cortical_mesh.interpolate(xy, self.visual_areas,
                                                method='heaviest')
     interp = np.asarray([interp_ae[0], interp_ae[1], interp_id])
     bad = np.where(np.isnan(np.prod(interp, axis=0)))[0]
     interp[:,bad] = 0.0
     return interp
Example #14
def image_shape(arg):
    '''
    image_shape(im) yields the image shape for the given image im. The argument im may be an image,
      an array, an image header, or an image spec.
    '''
    if   is_image(arg):                                sh = arg.shape
    elif pimms.is_vector(arg, 'int') and len(arg) < 5: sh = tuple(arg)
    elif is_image_spec(arg):                           sh = imspec_lookup(arg, 'image_shape')
    elif is_image_header(arg):                         sh = image_header_to_spec(arg)['image_shape']
    elif is_image_array(arg):                          sh = np.shape(arg)
    else: raise ValueError('Bad argument of type %s given to image_shape()' % type(arg))
    sh = tuple(sh)
    if   len(sh) == 2: sh = (sh[0], 1, 1, sh[1])
    elif len(sh) == 1: sh = (sh[0], 1, 1)
    return sh
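The normalization at the end promotes low-dimensional shapes; for example:

image_shape((64, 64, 32))  # -> (64, 64, 32)
image_shape((128, 10))     # -> (128, 1, 1, 10)  (2D shapes gain singleton middle axes)
image_shape((100,))        # -> (100, 1, 1)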
Example #15
def save_string(filename, s):
    '''
    save_string(filename, s) saves the given string to the given file as text. The argument s may
      be a list of strings (lines). If s is neither a list of strings nor a string, it is cast to
      a string using str(s).
    '''
    if pimms.is_vector(s, 'string'):
        with open(filename, 'w') as fl:
            for l in s:
                fl.write(l + '\n')
        return filename
    elif not pimms.is_str(s):
        s = str(s)
    with open(filename, 'w') as fl:
        fl.write(s)
    return filename
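Usage sketch (the paths are placeholders):

save_string('/tmp/notes.txt', 'one string')          # written verbatim
save_string('/tmp/notes.txt', ['line 1', 'line 2'])  # one line per element
save_string('/tmp/notes.txt', 3.14)                  # cast via str() first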
Example #16
def to_pseudo_path(obj):
    '''
    to_pseudo_path(obj) yields a pseudo-path object that has been coerced from the given obj or
      raises an exception. If the obj is a pseudo-path already, it is returned unchanged.
    '''
    if is_pseudo_path(obj): return obj
    elif pimms.is_str(obj): return pseudo_path(obj)
    elif pimms.is_vector(obj):
        if len(obj) > 0 and pimms.is_map(obj[-1]):
            (obj, kw) = (obj[:-1], obj[-1])
        else:
            kw = {}
        return pseudo_path(*obj, **kw)
    else:
        raise ValueError('cannot coerce given object to a pseudo-path: %s' %
                         obj)
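Usage sketch (the 'cache_path' keyword is merely illustrative of pseudo_path() options):

pp = to_pseudo_path('/some/local/dir')
# a vector whose last element is a mapping becomes positional + keyword arguments:
pp = to_pseudo_path(('s3://bucket/path', {'cache_path': '/tmp/cache'}))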
Example #17
def to_affine(aff, dims=None):
    '''
    to_affine(None) yields None.
    to_affine(data) yields an affine transformation matrix equivalent to that given in data. Such a
      matrix may be specified either as (matrix, offset_vector), as an (n+1)x(n+1) matrix, or as an
      n x (n+1) matrix.
    to_affine(data, dims) additionally requires that the dimensionality of the data be dims; meaning
      that the returned matrix will be of size (dims+1) x (dims+1).
    '''
    if aff is None: return None
    if isinstance(aff, _tuple_type):
        # allowed to be (mtx, offset)
        if (len(aff) != 2                       or
            not pimms.is_matrix(aff[0], 'real') or
            not pimms.is_vector(aff[1], 'real')):
            raise ValueError('affine transforms must be matrices or (mtx,offset) tuples')
        mtx = np.asarray(aff[0])
        off = np.asarray(aff[1])
        if dims is not None:
            if mtx.shape[0] != dims or mtx.shape[1] != dims:
                raise ValueError('%dD affine matrix must be %d x %d' % (dims,dims,dims))
            if off.shape[0] != dims:
                raise ValueError('%dD affine offset must have length %d' % (dims,dims))
        else:
            dims = off.shape[0]
            if mtx.shape[0] != dims or mtx.shape[1] != dims:
                raise ValueError('with offset size=%d, matrix must be %d x %d' % (dims,dims,dims))
        aff = np.zeros((dims+1,dims+1), dtype=float)
        aff[dims,dims] = 1
        aff[0:dims,0:dims] = mtx
        aff[0:dims,dims] = off
        return pimms.imm_array(aff)
    if not pimms.is_matrix(aff, 'real'):
        raise ValueError('affine transforms must be matrices or (mtx, offset) tuples')
    aff = np.asarray(aff)
    if dims is None:
        dims = aff.shape[1] - 1
    if aff.shape[0] == dims:
        lastrow = np.zeros((1,dims+1))
        lastrow[0,-1] = 1
        aff = np.concatenate((aff, lastrow))
    if aff.shape[1] != dims+1 or aff.shape[0] != dims+1:
        arg = (dims, dims,dims+1, dims+1,dims+1)
        raise ValueError('%dD affine matrix must be %dx%d or %dx%d' % arg)
    return aff
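A few illustrative calls covering the three accepted formats:

import numpy as np
to_affine(np.eye(4))                     # (n+1) x (n+1): returned as a 3D affine
to_affine((np.eye(3), [1.0, 2.0, 3.0]))  # (matrix, offset) -> full 4x4 matrix
to_affine([[1, 0, 5], [0, 1, 6]], 2)     # n x (n+1): row [0, 0, 1] is appended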
Example #18
 def finish_mag_data(mask):
     if len(mask) == 0: return None
     # now that we have the mask, we can subsample
     submesh = mesh.submesh(mask)
     mask = mesh.tess.index(submesh.labels)
     mdata = pyr.pmap({k:(v[mask]   if pimms.is_vector(v) else
                          v[:,mask] if pimms.is_matrix(v) else
                          None)
                       for (k,v) in six.iteritems(rdata)})
     fs = submesh.tess.indexed_faces
     (vx, sx)  = [x[:,mask]                        for x in (vcoords, scoords)]
     (vfx,sfx) = [np.asarray([x[:,f] for f in fs]) for x in (vx,      sx)]
     (va, sa)  = [geo.triangle_area(*x)            for x in (vfx, sfx)]
     return pyr.m(surface_coordinates=sx, visual_coordinates=vx,
                  surface_areas=sa,       visual_areas=va,
                  mesh=mesh,              submesh=submesh,
                  retinotopy_data=rdata,  masked_data=mdata,
                  mask=mask,              area_of_range=rarea)
Example #19
def to_label_index(obj):
    '''
    to_label_index(obj) attempts to coerce the given object into a label index object; if obj is
      already a label index object, then obj itself is returned. If obj cannot be coerced into a
      label index, then an error is raised.

    The obj argument can be any of the following:
      * a label index
      * a label list (i.e., an integer vector)
      * a tuple of arguments, potentially ending with a kw-options map, that can be passed to the
        label_index function successfully.
    '''
    if   is_label_index(obj): return obj
    elif pimms.is_vector(obj, 'int'): return label_index(obj)
    elif is_dataframe(obj): return label_index(obj)
    elif is_tuple(obj):
        if len(obj) > 1 and pimms.is_map(obj[-1]): return label_index(*obj[:-1], **obj[-1])
        else: return label_index(*obj)
    else: raise ValueError('could not parse to_label_index parameter: %s' % obj)
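Usage sketch of the accepted argument forms:

import numpy as np
to_label_index(np.array([0, 1, 1, 2]))  # an integer label vector
to_label_index(([1, 2], ['V1', 'V2']))  # a tuple of label_index() arguments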
Example #20
def main(*argv):
    '''
    neuropythy.commands.atlas.main() runs the main function of the atlas command in the neuropythy
      library. See the `python -m neuropythy atlas --help` or atlas.info for more information.
    '''
    argv = [aa for arg in argv for aa in (arg if pimms.is_vector(arg) else [arg])]
    imap = pimms.argv_parse(
        atlas_plan, argv,
        arg_abbrevs=atlas_cmdline_abbrevs)
    argv = imap['argv']
    if len(argv) == 0 or '--help' in argv or '-h' in argv:
        print(info)
        return 1
    try: imap['export_all_fn']()
    except Exception as e:
        sys.stderr.write('\nERROR:\n' + str(e) + '\n')
        sys.stderr.flush()
        sys.exit(2)
    return 0
Example #21
 def to_name(nm):
     '''
     Dataset.to_name(name) yields a valid dataset name equivalent to the given name or raises an
       error if name is not valid. In order to be valid, a name must be either a string or a tuple
       of numbers and strings that starts with a string.
     '''
     if pimms.is_str(nm): return nm
     if not pimms.is_vector(nm):
         raise ValueError('name must be a string or tuple')
     if len(nm) < 1:
         raise ValueError(
             'names that are tuples must have at least one element')
     if not pimms.is_str(nm[0]):
         raise ValueError('names that are tuples must begin with a string')
     if not all(pimms.is_str(x) or pimms.is_number(x) for x in nm):
         raise ValueError(
             'dataset names that are tuples must contain only strings and numbers'
         )
     return tuple(nm)
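For example (assuming Dataset is the class defining this static method):

Dataset.to_name('hcp_lines')       # -> 'hcp_lines'
Dataset.to_name(('benson', 2018))  # -> ('benson', 2018)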
Example #22
 def test_units(self):
     '''
     test_units ensures that the various pimms functions related to pint integration work
     correctly; these functions include pimms.unit, .mag, .quant, .is_quantity, etc.
     '''
     # make a few pieces of data with types
     x = np.asarray([1.0, 2.0, 3.0, 4.0]) * pimms.units.mm
     y = pimms.quant([2, 4, 6, 8], 'sec')
     for u in [x, y]:
         self.assertTrue(pimms.is_quantity(u))
     for u in ('abc', 123, 9.0, []):
         self.assertFalse(pimms.is_quantity(u))
     for u in [x, y]:
         self.assertFalse(pimms.is_quantity(pimms.mag(u)))
     self.assertTrue(pimms.like_units(pimms.unit(x), pimms.unit('yards')))
     self.assertTrue(pimms.like_units(pimms.unit(y), pimms.unit('minutes')))
     self.assertFalse(pimms.like_units(pimms.unit(y), pimms.unit('mm')))
     z = x / y
     self.assertTrue(pimms.is_vector(x, 'real'))
     self.assertTrue(pimms.is_vector(y, 'real'))
     self.assertFalse(pimms.is_vector(x, 'int'))
     self.assertTrue(pimms.is_vector(y, 'int'))
     self.assertTrue(pimms.is_vector(y, 'float'))
     self.assertTrue(pimms.is_vector(z, 'real'))
Example #23
def mag_data(hemi, retinotopy='any', surface='midgray', mask=None,
             weights=Ellipsis, weight_min=0, weight_transform=Ellipsis,
             visual_area=Ellipsis, visual_area_mask=Ellipsis,
             eccentricity_range=None, polar_angle_range=None):
    '''
    mag_data(hemi) yields a map of visual/cortical magnification data for the given hemisphere.
    mag_data(mesh) uses the given mesh.
    mag_data([arg1, arg2...]) maps over the given hemisphere or mesh arguments.
    mag_data(subject) is equivalent to mag_data([subject.lh, subject.rh]).
    mag_data(mdata) for a valid magnification data map mdata (i.e., is_mag_data(mdata) is True or
      mdata is a lazy map with integer keys) always yields mdata without considering any additional
      arguments.

    The data structure returned by mag_data is a lazy map containing the keys:
      * 'surface_coordinates': a (2 x N) or (3 x N) matrix of the mesh coordinates in the mask
        (usually in mm).
      * 'visual_coordinates': a (2 x N) matrix of the (x,y) visual field coordinates (in degrees).
      * 'surface_areas': a length N vector of the surface areas of the faces in the mesh.
      * 'visual_areas': a length N vector of the areas of the faces in the visual field.
      * 'mesh': the full mesh from which the surface coordinates are obtained.
      * 'submesh': the submesh of mesh of just the vertices in the mask (may be identical to mesh).
      * 'mask': the mask used.
      * 'retinotopy_data': the full set of retinotopy_data from the hemi/mesh; note that this will
        include the key 'weights' of the weights actually used and 'visual_area' of the found or
        specified visual area.
      * 'masked_data': the subsampled retinotopy data from the hemi/mesh.
    Note that if a visual_area property is found or provided (see options below), instead of
    yielding a map of the above, a lazy map whose keys are the visual areas and whose values are the
    maps described above is yielded instead.

    The following named options are accepted (in order):
      * retinotopy ('any') specifies the value passed to the retinotopy_data function to obtain the
        retinotopic mapping data; this may be a map of such data.
      * surface ('midgray') specifies the surface to use.
      * mask (None) specifies the mask to use.
      * weights, weight_min, weight_transform (Ellipsis, 0, Ellipsis) are used as in the
        to_property() function  in neuropythy.geometry except weights, which, if equal to Ellipsis,
        attempts to use the weights found by retinotopy_data() if any.
      * visual_area (Ellipsis) specifies the property to use for the visual area label; Ellipsis is
        equivalent to whatever visual area label is found by the retinotopy_data() function if any.
      * visual_area_mask (Ellipsis) specifies which visual areas to include in the returned maps,
        assuming a visual_area property is found; Ellipsis is equivalent to everything but 0; None
        is equivalent to everything.
      * eccentricity_range (None) specifies the eccentricity range to include.
      * polar_angle_range (None) specifies the polar_angle_range to include.
    '''
    if is_mag_data(hemi): return hemi
    elif pimms.is_lazy_map(hemi) and pimms.is_vector(hemi.keys(), 'int'): return hemi
    if mri.is_subject(hemi): hemi = (hemi.lh, hemi.rh)
    if pimms.is_vector(hemi):
        return tuple([mag_data(h, retinotopy=retinotopy, surface=surface, mask=mask,
                               weights=weights, weight_min=weight_min,
                               weight_transform=weight_transform, visual_area=visual_area,
                               visual_area_mask=visual_area_mask,
                               eccentricity_range=eccentricity_range,
                               polar_angle_range=polar_angle_range)
                      for h in hemi])
    # get the mesh
    mesh = geo.to_mesh((hemi, surface))
    # First, find the retino data
    retino = retinotopy_data(hemi, retinotopy)
    # we can process the rest the mask now, including weights and ranges
    if weights is Ellipsis: weights = retino.get('variance_explained', None)
    mask = hemi.mask(mask, indices=True)
    (arng,erng) = (polar_angle_range, eccentricity_range)
    (ang,ecc) = (retino['polar_angle'], retino['eccentricity'])
    if pimms.is_str(arng):
        tmp = to_hemi_str(arng)
        arng = (-180,0) if tmp == 'rh' else (0,180) if tmp == 'lh' else (-180,180)
    elif arng is None:
        tmp = ang[mask]
        tmp = tmp[np.isfinite(tmp)]
        arng = (np.min(tmp), np.max(tmp))
    if erng is None:
        tmp = ecc[mask]
        tmp = tmp[np.isfinite(tmp)]
        erng = (0, np.max(tmp))
    elif pimms.is_scalar(erng): erng = (0, erng)
    (ang,wgt) = hemi.property(retino['polar_angle'], weights=weights, weight_min=weight_min,
                              weight_transform=weight_transform, yield_weight=True)
    ecc = hemi.property(retino['eccentricity'], weights=weights, weight_min=weight_min,
                        weight_transform=weight_transform, data_range=erng)
    # apply angle range if given
    ((mn,mx),mid) = (arng, np.mean(arng))
    oks = mask[np.isfinite(ang[mask])]
    u = ang[oks]
    u = np.mod(u + 180 - mid, 360) - 180 + mid
    ang[oks[np.where((u < mn) | (u > mx))[0]]] = np.inf
    # mark/unify the out-of-range ones
    bad = np.where(np.isinf(ang) | np.isinf(ecc))[0]
    ang[bad] = np.inf
    ecc[bad] = np.inf
    wgt[bad] = 0
    wgt *= zinv(np.sum(wgt[mask]))
    # get visual and surface coords
    vcoords = np.asarray(as_retinotopy(retino, 'geographical'))
    scoords = mesh.coordinates
    # now figure out the visual area so we can call down if we need to
    if visual_area is Ellipsis: visual_area = retino.get('visual_area', None)
    if visual_area is not None: retino['visual_area'] = visual_area
    if wgt is not None: retino['weights'] = wgt
    rdata = pimms.persist(retino)
    # calculate the range area
    (tmn,tmx) = [np.pi/180.0 * u for u in arng]
    if tmx - tmn >= 2*np.pi: (tmn,tmx) = (-np.pi,np.pi)
    (emn,emx) = erng
    rarea = 0.5 * (emx*emx - emn*emn) * (tmx - tmn)
    # okay, we have the data organized; we can do the calculation based on this, but we may have a
    # visual area mask to apply as well; here's how we do it regardless of mask
    def finish_mag_data(mask):
        if len(mask) == 0: return None
        # now that we have the mask, we can subsample
        submesh = mesh.submesh(mask)
        mask = mesh.tess.index(submesh.labels)
        mdata = pyr.pmap({k:(v[mask]   if pimms.is_vector(v) else
                             v[:,mask] if pimms.is_matrix(v) else
                             None)
                          for (k,v) in six.iteritems(rdata)})
        fs = submesh.tess.indexed_faces
        (vx, sx)  = [x[:,mask]                        for x in (vcoords, scoords)]
        (vfx,sfx) = [np.asarray([x[:,f] for f in fs]) for x in (vx,      sx)]
        (va, sa)  = [geo.triangle_area(*x)            for x in (vfx, sfx)]
        return pyr.m(surface_coordinates=sx, visual_coordinates=vx,
                     surface_areas=sa,       visual_areas=va,
                     mesh=mesh,              submesh=submesh,
                     retinotopy_data=rdata,  masked_data=mdata,
                     mask=mask,              area_of_range=rarea)
    # if there's no visual area, we just use the mask as is
    if visual_area is None: return finish_mag_data(mask)
    # otherwise, we return a lazy map of the visual area mask values
    visual_area = hemi.property(visual_area, mask=mask, null=0, dtype=int)
    vam = (np.unique(visual_area)                    if visual_area_mask is None     else
           np.setdiff1d(np.unique(visual_area), [0]) if visual_area_mask is Ellipsis else
           np.unique(list(visual_area_mask)))
    return pimms.lazy_map({va: curry(finish_mag_data, mask[visual_area[mask] == va])
                           for va in vam})
Example #24
def mesh_register(mesh,
                  field,
                  max_steps=2000,
                  max_step_size=0.05,
                  max_pe_change=1,
                  method='random',
                  return_report=False,
                  initial_coordinates=None):
    '''
    mesh_register(mesh, field) yields the mesh that results from registering the given mesh by
    minimizing the given potential field description over the position of the vertices in the
    mesh. The mesh argument must be a Mesh object (see neuropythy.geometry) such as can be read
    from FreeSurfer using the neuropythy.freesurfer_subject function. The field argument must be
    a list of field names and arguments; with the exception of 'mesh' (or 'standard'), the 
    arguments must be a list, the first element of which is the field type name, the second
    element of which is the field shape name, and the final element of which is a dictionary of
    arguments accepted by the field shape.

    The following are valid field type names:
      * 'mesh' : the standard mesh potential, which includes an edge potential, an angle
        potential, and a perimeter potential. Accepts no arguments, and must be passed as a
        single string instead of a list.
      * 'edge': an edge potential field in which the potential is a function of the change in the
        edge length, summed over each edge in the mesh.
      * 'angle': an angle potential field in which the potential is a function of the change in
        the angle measure, summed over all angles in the mesh.
      * 'perimeter': a potential that depends on the vertices on the perimeter of a 2D mesh
        remaining in place; the potential changes as a function of the distance of each perimeter
        vertex from its reference position.
      * 'anchor': a potential that depends on the distance of a set of vertices from fixed points
        in space. After the shape name second argument, an anchor must be followed by a list of
        vertex ids then a list of fixed points to which the vertex ids are anchored:
        ['anchor', shape_name, vertex_ids, fixed_points, args...].

    The following are valid shape names:
      * 'harmonic': a harmonic function with the form (c/q) * abs(x - x0)^q.
        Parameters: 
          * 'scale', the scale parameter c; default: 1.
          * 'order', the order parameter q; default: 2.
      * 'Lennard-Jones': a Lennard-Jones function with the form c (1 + (r0/r)^q - 2(r0/r)^(q/2));
        Parameters:
          * 'scale': the scale parameter c; default: 1. 
          * 'order': the order parameter q; default: 2.
      * 'Gaussian': A Gaussian function with the form c (1 - exp(-0.5 abs((x - x0)/s)^q))
        Parameters:
          * 'scale': the scale parameter c; default: 1.
          * 'order': the order parameter q; default: 2.
          * 'sigma': the standard deviation parameter s; default: 1.
      * 'infinite-well': an infinite well function with the form 
        c ( (((x0 - m)/(x - m))^q - 1)^2 + (((M - x0)/(M - x))^q - 1)^2 )
        Parameters:
          * 'scale': the scale parameter c; default: 1.
          * 'order': the order parameter q; default: 0.5.
          * 'min': the minimum value m; default: 0.
          * 'max': the maximum value M; default: pi.

    Options: The following optional arguments are accepted.
      * max_steps (default: 2000) the maximum number of steps to minimize for.
      * max_step_size (default: 0.05) the maximum distance to allow a vertex to move in a single
        minimization step.
      * max_pe_change: the maximum fraction of the initial potential value that the minimizer
        should minimize away before returning; i.e., 0 indicates that no minimization should be
        allowed while 0.9 would indicate that the minimizer should minimize until the potential
        is 10% or less of the initial potential.
      * return_report (default: False) indicates that instead of returning the registered data,
        mesh_register should instead return the Java Minimizer.Report object (for debugging).
      * method (default: 'random') specifies the search algorithm used; available options are 
        'random', 'nimble', and 'pure'. Generally all options will converge on a similar solution,
        but usually 'random' is fastest. The 'pure' option uses the nben library's step function,
        which performs straight-forward gradient descent. The 'nimble' option performs a gradient
        descent in which subsets of vertices in the mesh that have the highest gradients during the
        registration are updated more often than those vertices with small gradients; this can
        sometimes but not always increase the speed of the minimization. Note that instead of
        'nimble', one may alternately provide ('nimble', k) where k is the number of partitions that
        the vertices should be sorted into (by partition). 'nimble' by itself is equivalent to 
        ('nimble', 4). Note also that a single step of nimble minimization is equivalent to 2**k
        steps of 'pure' minimization. Finally, the 'random' option uses the nben library's
        randomStep function, which is a gradient descent algorithm that moves each vertex in the
        direction of its negative gradient during each step but which randomizes the length of the
        gradient at each individual vertex by drawing from an exponential distribution centered at
        the vertex's actual gradient length. In effect, this can prevent vertices with very large
        gradients from dominating the minimization and often results in the best results.
      * initial_coordinates (default: None) specifies the start coordinates of the registration;
        if None, uses those in the given mesh, which is generally desired.

    Examples:
      registered_mesh = mesh_register(
         mesh,
         [['edge', 'harmonic', 'scale', 0.5], # slightly weak edge potential
          ['angle', 'infinite-well'], # default arguments for an infinite-well angle potential
          ['anchor', 'Gaussian', [1, 10, 50], [[0.0, 0.0], [1.1, 1.1], [2.2, 2.2]]]],
         max_step_size=0.05,
         max_steps=10000)
    '''
    # Sanity checking:
    # First, make sure that the arguments are all okay:
    if not isinstance(mesh, geo.Mesh):
        raise RuntimeError(
            'mesh argument must be an instance of neuropythy.geometry.Mesh')
    if not pimms.is_vector(max_steps): max_steps = [max_steps]
    for ms in max_steps:
        if not pimms.is_int(ms) or ms < 0:
            raise RuntimeError('max_steps argument must be a positive integer')
    if not pimms.is_vector(max_step_size): max_step_size = [max_step_size]
    for mss in max_step_size:
        if not pimms.is_number(mss) or mss <= 0:
            raise RuntimeError('max_step_size must be a positive number')
    if not pimms.is_number(
            max_pe_change) or max_pe_change <= 0 or max_pe_change > 1:
        raise RuntimeError(
            'max_pe_change must be a number x such that 0 < x <= 1')
    if pimms.is_vector(method):
        if (method[0].lower() == 'nimble' and len(method) > 1 and
            not pimms.is_str(method[1])):
            method = [method]
    else:
        method = [method]
    if initial_coordinates is None:
        init_coords = mesh.coordinates
    else:
        init_coords = np.asarray(initial_coordinates)
        if init_coords.shape[0] != mesh.coordinates.shape[0]:
            init_coords = init_coords.T
    # If steps is 0, we can skip most of this...
    if np.sum(max_steps) == 0:
        if return_report: return None
        else: return init_coords
    # Otherwise, we run at least some minimization
    max_pe_change = float(max_pe_change)
    nrounds = len(max_steps)
    if nrounds > 1:
        if len(max_step_size) == 1:
            max_step_size = [max_step_size[0] for _ in max_steps]
        if len(method) == 1: method = [method[0] for _ in max_steps]
    # Parse the field argument.
    faces = to_java_ints(mesh.tess.indexed_faces)
    edges = to_java_ints(mesh.tess.indexed_edges)
    coords = to_java_doubles(mesh.coordinates)
    init_coords = coords if init_coords is mesh.coordinates else to_java_doubles(
        init_coords)
    potential = _parse_field_arguments(field, faces, edges, coords)
    # Okay, that's basically all we need to do the minimization...
    rep = []
    for (method, max_step_size, max_steps) in zip(method, max_step_size,
                                                  max_steps):
        minimizer = java_link().jvm.nben.mesh.registration.Minimizer(
            potential, init_coords)
        max_step_size = float(max_step_size)
        max_steps = int(max_steps)
        if pimms.is_str(method):
            method = method.lower()
            if method == 'nimble': k = 4
            else: k = 0
        else:
            k = method[1]
            method = method[0].lower()
        if method == 'pure':
            r = minimizer.step(max_pe_change, max_steps, max_step_size)
        elif method == 'random':
            # if k is -1, we do the inverse version where we draw from the 1/mean distribution
            r = minimizer.randomStep(max_pe_change, max_steps, max_step_size,
                                     k == -1)
        elif method == 'nimble':
            r = minimizer.nimbleStep(max_pe_change, max_steps, max_step_size,
                                     int(k))
        else:
            raise ValueError('Unrecognized method: %s' % method)
        rep.append(r)
        init_coords = minimizer.getX()
    # Return the report if requested
    if return_report:
        return rep
    else:
        result = init_coords
        return np.asarray([[x for x in row] for row in result])
Example #25
def detect_credentials(config_name, extra_environ=None, filenames=None,
                       aws_profile_name=None, default_value=Ellipsis):
    '''
    detect_credentials(config_name) attempts to locate Amazon S3 Bucket credentials from the given
      configuration item config_name.

    The following optional arguments are accepted:
      * extra_environ (default: None) may specify a string or a tuple (key_name, secret_name) or a
        list of strings or tuples; strings are treated as an additional environment variable that
        should be checked for credentials while tuples are treated as paired variables: if both are
        defined, then they are checked as separate holders of a key/secret pair. Note that a list
        of strings is considered a pair of solo environment variables while a tuple of strings is
        considered a single (key_name, secret_name) pair.
      * filenames (default: None) may specify a list of filenames that are checked in order for
        credentials.
      * aws_profile_name (default: None) may specify a profile name that appears in the
        ~/.aws/credentials file that will be checked for aws_access_key_id and aws_secret_access_key
        values. The files ~/.amazon/credentials and ~/.credentials are also checked. Note that this
        may be a list of profiles to check.
      * default_value (default: Ellipsis) may specify a value to return when no credentials are
        found; if this value is None, then it is always returned; otherwise, the value is passed
        through to_credentials() and any errors are allowed to propagate out of
        detect_credentials(). If default_value is Ellipsis then an error is simply raised stating
        that no credentials could be found.

    The detect_credentials() function looks at the following locations in the following order,
    assuming that it has been provided with the relevant information:
      * first, if the Neuropythy configuration variable config_name is set via either the npythyrc
        file or the associated environment variable, then it is coerced into credentials;
      * next, if the environment contains both the variables key_name and secret_name (from the
        optional argument extra_environ), then these values are used;
      * next, if the filenames argument is given, then all files it refers to are checked for
        credentials; these files are expanded with both os.expanduser and os.expandvars.
      * finally, if no credentials were detected, an error is raised.
    '''
    # Check the config first:
    if config_name is not None and config[config_name] is not None: return config[config_name]
    # Okay, not found there; check the key/secret environment variables
    if   extra_environ is None: extra_environ = []
    elif pimms.is_str(extra_environ): extra_environ = [extra_environ]
    elif pimms.is_vector(extra_environ):
        if pimms.is_vector(extra_environ, str):
            if len(extra_environ) == 2 and isinstance(extra_environ, _tuple_type):
                extra_environ = [extra_environ]
    elif not pimms.is_matrix(extra_environ, str):
        raise ValueError('extra_environ must be a string, tuple of strings, or list of these')
    for ee in extra_environ:
        if pimms.is_str(ee):
            if ee in os.environ:
                try:    return to_credentials(os.environ[ee])
                except Exception: pass
        elif pimms.is_vector(ee, str) and len(ee) == 2:
            if ee[0] in os.environ and ee[1] in os.environ:
                (k,s) = [os.environ[q] for q in ee]
                if len(k) == 0 or len(s) == 0: continue
                return (k,s)
        else: raise ValueError('cannot interpret extra_environ argument: %s' % ee)
    # Okay, next we check the filenames
    if filenames is None: filenames = []
    elif pimms.is_str(filenames): filenames = [filenames]
    for flnm in filenames:
        flnm = os.path.expanduser(os.path.expandvars(flnm))
        if os.path.isfile(flnm):
            try:    return to_credentials(flnm)
            except Exception: pass
    # okay... let's check the AWS credentials file, if it exists
    if pimms.is_str(aws_profile_name): aws_profile_name = [aws_profile_name]
    elif aws_profile_name is None or len(aws_profile_name) == 0: aws_profile_name = None
    elif not pimms.is_vector(aws_profile_name, str):
        raise ValueError('Invalid aws_profile_name value: %s' % aws_profile_name)
    if aws_profile_name is not None:
        try:
            cc = confparse.ConfigParser()
            cc.read([os.path.expanduser(os.path.join('~', '.aws', 'credentials')),
                     os.path.expanduser(os.path.join('~', '.amazon', 'credentials')),
                     os.path.expanduser(os.path.join('~', '.credentials'))])
            for awsprof in aws_profile_name:
                try:
                    aws_access_key_id     = cc.get(awsprof, 'aws_access_key_id')
                    aws_secret_access_key = cc.get(awsprof, 'aws_secret_access_key')
                    return (aws_access_key_id, aws_secret_access_key)
                except Exception: pass
        except Exception: pass
    # no match!
    if default_value is None:
        return None
    elif default_value is Ellipsis:
        if config_name is None: raise ValueError('No valid credentials were detected')
        else: raise ValueError('No valid credentials (%s) were detected' % config_name)
    else: return to_credentials(default_value)
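A hypothetical invocation (the configuration name, environment variables, and filename are placeholders):

creds = detect_credentials('hcp-credentials',
                           extra_environ=('HCP_KEY', 'HCP_SECRET'),
                           filenames='~/.hcp-passwd',
                           default_value=None)  # None -> None when nothing is found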
Example #26
def disk_vmag(hemi, retinotopy='any', to=None, **kw):
    '''
    disk_vmag(mesh) yields the visual magnification based on the projection of disks on the cortical
      surface into the visual field.

    All options accepted by mag_data() are accepted by disk_vmag().
    '''
    mdat = mag_data(hemi, retinotopy=retinotopy, **kw)
    if pimms.is_vector(mdat): return tuple([disk_vmag(m, to=to) for m in mdat])
    elif pimms.is_vector(mdat.keys(), 'int'):
        return pimms.lazy_map({k: curry(lambda k: disk_vmag(mdat[k], to=to), k)
                               for k in six.iterkeys(mdat)})
    # for disk cmag we start by making sets of circular points around each vertex
    msh  = mdat['submesh']
    n    = msh.vertex_count
    vxy  = mdat['visual_coordinates'].T
    sxy  = msh.coordinates.T
    neis = msh.tess.indexed_neighborhoods
    nnei = np.asarray(list(map(len, neis)))
    emax = np.max(nnei)
    whs  = [np.where(nnei > k)[0] for k in range(emax)]
    neis = np.asarray([u + (-1,)*(emax - len(u)) for u in neis])
    dist = np.full((n, emax), np.nan)
    for (k,wh) in enumerate(whs):
        nei = neis[wh,k]
        dxy = sxy[nei] - sxy[wh]
        dst = np.sqrt(np.sum(dxy**2, axis=1))
        dist[wh,k] = dst
    # find min dist from each vertex
    ww  = np.where(np.max(np.isfinite(dist), axis=1) == 1)[0]
    whs = [np.intersect1d(wh, ww) for wh in whs]
    radii  = np.full(n, np.nan)
    radii[ww] = np.nanmin(dist[ww], 1)
    dfracs = (radii * zinv(dist.T)).T
    ifracs = 1 - dfracs
    # make points that distance from each edge
    ellipses = np.full((n, emax, 2), np.nan)
    for (k,wh) in enumerate(whs):
        xy0 = vxy[wh]
        xyn = vxy[neis[wh,k]]
        ifr = ifracs[wh,k]
        dfr = dfracs[wh,k]
        uxy1 = xy0 * np.transpose([ifr, ifr])
        uxy2 = xyn * np.transpose([dfr, dfr])
        uxy = (uxy1 + uxy2)
        ellipses[wh,k,:] = uxy - xy0
    # we want to rotate the points to be along their center's implied rad/tan axis
    vrs  = np.sqrt(np.sum(vxy**2, axis=1))
    irs  = zinv(vrs)
    coss = vxy[:,0] * irs
    sins = vxy[:,1] * irs
    # rotating each ellipse by negative-theta gives us x-radial and y=tangential
    cels = (coss * ellipses.T)
    sels = (sins * ellipses.T)
    rots = np.transpose([cels[0] + sels[1], cels[1] - sels[0]], [1,2,0])
    # now we fit the best rad/tan-oriented ellipse we can with the given center
    rsrt = np.sqrt(np.sum(rots**2, axis=2)).T
    (csrt,snrt) = zinv(rsrt) * rots.T
    # ... a*cos(rots) + b*sin(rots) ~= r(rots) where a = radial vmag and b = tangential vmag
    axes = []
    cods = []
    idxs = []
    for (r,c,s,k,irad,i) in zip(rsrt,csrt,snrt,nnei,zinv(radii),range(len(nnei))):
        if k < 3: continue
        (c,s,r) = [u[:k] for u in (c,s,r)]
        # if the center point is way outside the min/max, skip it
        (x,y) = (r*c, r*s)
        if len(np.unique(np.sign(x))) < 2 or len(np.unique(np.sign(y))) < 2: continue
        mudst = np.sqrt(np.sum(np.mean([x, y], axis=1)**2))
        if mudst > np.min(r): continue
        # okay, fit an ellipse...
        fs = np.transpose([c,s])
        try:
            (ab,rss,rnk,svs) = np.linalg.lstsq(fs, r, rcond=None)
            if len(rss) == 0 or rnk < 2 or np.min(svs/np.sum(svs)) < 0.01: continue
            axes.append(np.abs(ab) * irad)
            cods.append(1 - rss[0]*zinv(np.sum(r**2)))
            idxs.append(i)
        except Exception as e: continue
    (axes, cods, idxs) = [np.asarray(u) for u in (axes, cods, idxs)]
    return (idxs, axes, cods)
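A sketch of interpreting the return value (hemi being a hemisphere or mesh with retinotopy data):

(idxs, axes, cods) = disk_vmag(hemi)
# idxs: submesh vertex indices at which an ellipse could be fit
# axes: per-vertex (radial, tangential) magnification axis lengths
# cods: coefficient of determination of each ellipse fit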
Example #27
def calc_retinotopy(hemisphere_data,
                    hemisphere_tags,
                    label_data,
                    angles='*.prf_angle.mgz',
                    eccens='*.prf_eccen.mgz',
                    weights='*.prf_vexpl.mgz'):
    '''
    calc_retinotopy imports the raw retinotopy data for the given subject.

    Afferent parameters:
      @ angles 
        The filenames of the polar-angle files that are needed for each hemisphere. For more 
        information on how these files are specified, see the help text for the labels parameter.
        If angles is not supplied, then the default value is '*.prf_angle.mgz'. Polar angles
        MUST be encoded in clockwise degrees of rotation starting from the positive y-axis.
      @ eccens 
        The filenames of the eccentricity files that are needed for each hemisphere. For more 
        information on how these files are specified, see the help text for the labels parameter.
        If eccens is not supplied, then the default value is '*.prf_eccen.mgz'. Eccentricity
        MUST be encoded in degrees of visual angle.
      @ weights 
        The filenames of the weights (usually fraction of variance explained) files that are needed
        for each hemisphere. For more information on how these files are specified, see the help
        text for the labels parameter. If weights is not supplied, then the default value is
        '*.prf_vexpl.mgz'. Variance explained should be encoded as a fraction with 1 indicating
        100% variance explained.
    '''
    retino = {}
    for (k, val) in zip(['angle', 'eccen', 'weight'],
                        [angles, eccens, weights]):
        if not pimms.is_str(val):
            raise ValueError('could not understand non-string %ss' % k)
        # first, it might just be a template pattern
        fls = {}
        if '*' in val:
            sparts = val.split('*')
            for h in hemisphere_tags:
                flnm = h.join(sparts)
                fls[h] = os.path.expanduser(os.path.expandvars(flnm))
        else:
            # okay, separate commas...
            lsplit = val.split(',')
            for (kk, l) in enumerate(lsplit):
                if ':' in l: (tag, name) = l.split(':')
                elif kk < len(hemisphere_tags):
                    (tag, name) = (hemisphere_tags[kk], l)
                else:
                    raise ValueError('could not match %ss to hemispheres' %
                                     (k, ))
                if tag not in hemisphere_data:
                    raise ValueError('Tag %s (in %ss arg) does not exist' %
                                     (tag, k))
                fls[tag] = os.path.expanduser(os.path.expandvars(name))
        retino[k] = fls
    # now go through and load them
    res = {}
    for (k, fls) in six.iteritems(retino):
        rr = {}
        for (tag, name) in six.iteritems(fls):
            if not os.path.isfile(name):
                raise ValueError('%ss filename %s not found' % (
                    k,
                    name,
                ))
            hem = hemisphere_data[tag]
            tmp = nyio.load(name)
            if not pimms.is_vector(tmp) or len(tmp) != hem.vertex_count:
                raise ValueError('%ss file %s does not contain valid vertex data' % (
                    k,
                    name,
                ))
            rr[tag] = np.asarray(tmp)
        res[k] = rr
    return {'raw_retinotopy': pimms.persist(res)}