Example #1
def name_lookup(self, ii):
    '''
    lblidx.name_lookup(ii) yields the name associated with the label with the given id. If ii
      is a list of ids, then yields an array of names.
    '''
    if pimms.is_int(ii):
        return self.by_id[ii].name if ii in self.by_id else None
    elif pimms.is_str(ii):
        return self.by_name[ii].name if ii in self.by_name else None
    else:
        return np.asarray([
            tbl[i].name if i in tbl else None
            for i in ii
            for tbl in [self.by_name if pimms.is_str(i) else self.by_id]
        ])
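A minimal standalone sketch of the same id-or-name dispatch pattern, using plain dicts in place of the label index (the labels here are made up):

from collections import namedtuple
Label = namedtuple('Label', ['id', 'name'])
by_id   = {1: Label(1, 'V1'), 2: Label(2, 'V2')}
by_name = {'V1': by_id[1], 'V2': by_id[2]}
# Mixed ids and names resolve against the appropriate table:
print([tbl[i].name if i in tbl else None
       for i in [1, 'V2', 99]
       for tbl in [by_name if isinstance(i, str) else by_id]])
# => ['V1', 'V2', None]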
Example #2
def color_lookup(self, ii):
    '''
    lblidx.color_lookup(ids) yields the color(s) associated with the labels with the given ids.
      If ids is a list of ids, then yields a matrix of colors.
    lblidx.color_lookup(names) uses the names to look up the label colors.
    '''
    if pimms.is_int(ii):
        return self.by_id[ii].color if ii in self.by_id else None
    elif pimms.is_str(ii):
        return self.by_name[ii].color if ii in self.by_name else None
    else:
        return np.asarray([
            tbl[i].color if i in tbl else None
            for i in ii
            for tbl in [self.by_name if pimms.is_str(i) else self.by_id]
        ])
Example #3
def address_data(data, dims=None, surface=0.5, strict=True):
    '''
    address_data(addr) yields the tuple (faces, coords) of the address data where both faces and
      coords are guaranteed to be numpy arrays with sizes (3 x n) and (d x n); this will coerce
      the data found in addr if necessary to do this. If the data is not valid, then an error is
      raised. If the address is empty, this yields (None, None).

    The following options may be given:
       * dims (default: None) specifies the dimensions requested for the coordinates. If 2, then
         the final dimension is dropped from 3D coordinates; if 3, then the optional surface
         argument is added as the final dimension of 2D coordinates.
       * surface (default: 0.5) specifies the surface to use for 2D addresses when a 3D address
         is requested. If None, then an error will be raised when this condition is encountered.
         This should be either 'white', 'pial', 'midgray', or a real number in the range [0,1]
         where 0 is the white surface and 1 is the pial surface.
       * strict (default: True) specifies whether an error should be raised when there are
         non-finite values found in the faces or the coordinates matrices. These values are usually
         indicative of an attempt to address a point that was not inside the mesh/cortex.
    '''
    if data is None: return (None, None)
    if not is_address(data): raise ValueError('argument is not an address')
    faces = np.asarray(data['faces'])
    coords = np.asarray(data['coordinates'])
    if len(faces.shape) > 2 or len(coords.shape) > 2:
        raise ValueError('address data contained high-dimensional arrays')
    elif len(faces.shape) != len(coords.shape):
        raise ValueError('address data faces and coordinates are different shapes')
    elif len(faces) == 0: return (None, None)
    if len(faces.shape) == 2 and faces.shape[0] != 3: faces = faces.T
    if faces.shape[0] != 3: raise ValueError('address contained bad face matrix')
    if len(coords.shape) == 2 and coords.shape[0] not in (2,3): coords = coords.T
    if coords.shape[0] not in (2,3): raise ValueError('address coords are neither 2D nor 3D')
    if dims is None: dims = coords.shape[0]
    elif coords.shape[0] != dims:
        if dims == 2: coords = coords[:2]
        else:
            if surface is None: raise ValueError('address data must be 3D')
            elif pimms.is_str(surface):
                surface = surface.lower()
                if surface == 'pial': surface = 1
                elif surface == 'white': surface = 0
                elif surface in ('midgray', 'mid', 'middle'): surface = 0.5
                else: raise ValueError('unrecognized surface name: %s' % surface)
            if not pimms.is_real(surface) or surface < 0 or surface > 1:
                raise ValueError('surface must be a real number in [0,1]')
            coords = np.vstack((coords, np.full((1, coords.shape[1]), surface)))
    if strict:
        if np.sum(np.logical_not(np.isfinite(coords))) > 0:
            w = np.where(np.logical_not(np.isfinite(coords)))
            if len(w[0]) > 10:
                raise ValueError('address contains %d non-finite coords' % len(w[0]))
            else:
                raise ValueError('address contains %d non-finite coords (%s)' % (len(w[0]), w))
        if np.sum(np.logical_not(np.isfinite(faces))) > 0:
            w = np.where(np.logical_not(np.isfinite(faces)))
            if len(w[0]) > 10:
                raise ValueError('address contains %d non-finite faces' % len(w[0]))
            else:
                raise ValueError('address contains %d non-finite faces (%s)' % (len(w[0]),w))
    return (faces, coords)
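A short standalone sketch of the 2D-to-3D coercion step above: a surface depth in [0,1] is stacked onto the 2D coordinates as a constant third row (values are illustrative):

import numpy as np
coords = np.array([[0.2, 0.5], [0.3, 0.1]])  # a (2 x n) coordinate matrix
surface = 0.5                                # midgray depth
coords3 = np.vstack((coords, np.full((1, coords.shape[1]), surface)))
print(coords3.shape)  # (3, 2)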
Example #4
def exporter(name, extensions=None, sniff=None):
    '''
    @exporter(name) is a decorator that declares that the following function is a file-saving
      function that should be registered with the neuropythy save function. See also the
      forget_exporter function.

    Any exporter function must take, as its first argument, a filename and, as its second argument,
    the object to be exported; after that it may take any number of keyword arguments, but no other
    non-keyword arguments. These keyword arguments can be passed to the neuropythy save function.
    
    The following options are accepted:
      * extensions (default: None) may be a string or a collection of strings that indicate possible
        file extensions for files of this type.
      * sniff (default: None) may optionally be a function f(s, d) that yields True when the given
        string s is a filename for a file of this type and/or the given object d is an object that
        can be exported as this type. If no sniff is given, this type can still be detected by
        running all exporters exhaustively and catching any raised exceptions; however, this may
        result in partial files being written to disk, so the save function does not do this.
    '''
    name = name.lower()
    if name in exporters:
        raise ValueError(
            'An exporter for type %s already exists; use forget_exporter' %
            name)
    extensions = (extensions,) if pimms.is_str(extensions) else \
                 ()            if extensions is None       else \
                 tuple(extensions)

    def _exporter(f):
        global exporters
        exporters = exporters.set(name, (f, extensions, sniff))
        setattr(save, name, f)
        return f

    return _exporter
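A hypothetical usage sketch of the decorator above; the 'json' name and save_json function are illustrative, and this assumes the module-level exporters registry and save function that the code references:

@exporter('json', extensions=('json',))
def save_json(filename, obj, **kwargs):
    # save_json(filename, obj) writes obj to filename as JSON and returns the filename.
    import json
    with open(filename, 'w') as f:
        json.dump(obj, f)
    return filename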
Example #5
def to_default_alignment_value(x):
    if not pimms.is_str(x): raise ValueError('hcp_default_alignment must be a string')
    x = x.lower()
    if   x in ('msmsulc', 'sulc'): x = 'MSMSulc'
    elif x in ('msmall', 'all'):   x = 'MSMAll'
    else: raise ValueError('invalid value for hcp_default_alignment: %s' % x)
    return x
Example #6
def to_image_type(image_type):
    '''
    to_image_type(image_type) yields an image-type class equivalent to the given image_type
      argument, which may be a type name or alias or an image or header object or class.    
    '''
    if image_type is None: return None
    if isinstance(image_type, type) and issubclass(image_type, ImageType):
        return image_type
    if pimms.is_str(image_type):
        image_type = image_type.lower()
        if image_type in image_types_by_name:
            return image_types_by_name[image_type]
        for it in image_types:
            if image_type in it.aliases(): return it
        raise ValueError('"%s" is not a valid image-type name or alias' %
                         image_type)
    for x in (image_type, type(image_type)):
        try:
            return image_types_by_image_type[x]
        except Exception:
            pass
        try:
            return image_types_by_header_type[x]
        except Exception:
            pass
    raise ValueError('Unsupported image type: %s' % image_type)
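A hedged usage sketch of the lookups above (this assumes neuropythy's image-type registries are populated; 'nifti1' is the default name used elsewhere in this module):

to_image_type('nifti1')  # look up an image-type class by its registered name
to_image_type(None)      # => None
# An image or header instance (or its class) is instead resolved through the
# image_types_by_image_type / image_types_by_header_type registries.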
Example #7
def lookup_labels(labels, data_by_labels, **kwargs):
    '''
    sco.util.lookup_labels(labels, data_by_labels) yields a list the same size as labels in which
      each element i of the list is equal to data_by_labels[labels[i]].
    
    The option null may additionally be passed to lookup_labels; if null is given, then whenever a
    label value from labels is not found in data_by_labels, it is instead given the value null; if
    null is not given, then an error is raised in this situation.

    If the data_by_labels given is a string, then lookup_labels attempts to use the value
    global_lookup(data_by_labels) in its place.

    The lookup_labels function expects the labels to be integer or numerical values.
    '''
    res = None
    null = None
    raise_q = True
    if _pimms.is_str(data_by_labels): data_by_labels = global_lookup(data_by_labels)
    if 'null' in kwargs:
        null = kwargs['null']
        raise_q = False
    if len(kwargs) > 1 or (len(kwargs) > 0 and 'null' not in kwargs):
        raise ValueError('Unexpected option given to lookup_labels; only null is accepted')
    if raise_q:
        try:
            res = [data_by_labels[lbl] for lbl in labels]
        except Exception:
            raise ValueError('Not all labels found by lookup_labels and no null given')
    else:
        res = [data_by_labels[lbl] if lbl in data_by_labels else null for lbl in labels]
    return _pyr.pvector(res)
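A standalone illustration of the null-default behavior, with a plain dict standing in for data_by_labels:

data_by_labels = {1: 'V1', 2: 'V2', 3: 'V3'}
labels = [1, 3, 7]
print([data_by_labels[lbl] if lbl in data_by_labels else None for lbl in labels])
# => ['V1', 'V3', None]  (with null=None; without null, label 7 raises instead)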
Example #8
def init_image(direction='radial', max_eccen=12, image_size=None):
    '''
    init_image(direction, max_eccen, image_size) initializes an image for use
    with the *_image() functions below, and yields a tuple of (angle, eccen,
    theta) where angle is an image of the polar angle of each pixel in ccw
    radians starting at the RHM, eccen is the eccentricity of each pixel in
    visual degrees, and theta is the angular component to the cos() function
    such that cos(theta) yields the gradient image.
    '''
    # If we're given a string for the direction, we need to construct an image
    # that goes with it; otherwise we already have the image ready.
    if pimms.is_str(direction):
        if image_size is None: image_size = 512
    else:
        if direction.shape[0] != direction.shape[1]:
            raise ValueError("square images are required")
        image_size = direction.shape[0]
    x = np.linspace(-max_eccen, max_eccen, image_size)
    (x,y) = np.meshgrid(x, x)
    ang = np.arctan2(y, x)
    ecc = np.sqrt(x**2 + y**2)
    # We also want an image of theta values
    if not pimms.is_str(direction):
        theta = direction
    elif direction in ('radial', 'rad', 'r'):
        theta = ang
    elif direction in ('tangential', 'tan', 't'):
        theta = ang + np.pi/2
    elif direction in ('horizontal', 'hrz', 'h'):
        theta = np.pi/2
    elif direction in ('vertical', 'vrt', 'v'):
        theta = 0
    else:
        raise ValueError('unrecognized direction: %s' % direction)
    return (ang, ecc, theta)
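A brief usage sketch of init_image as corrected above (assumes numpy as np and pimms, as in the rest of this module):

(ang, ecc, theta) = init_image('radial', max_eccen=12, image_size=256)
grating = np.cos(theta)      # per the docstring, cos(theta) is the gradient image
print(ang.shape, ecc.shape)  # (256, 256) (256, 256)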
Example #9
def to_retinotopy_cache_path(p):
    '''
    to_retinotopy_cache_path(p) yields the normalized path if p is a string naming an existing
      directory, yields Ellipsis if p is Ellipsis, and otherwise yields None.
    '''
    if pimms.is_str(p) and os.path.isdir(p): return os.path.normpath(p)
    elif p is Ellipsis: return p
    else: return None
Example #10
def str_to_credentials(s):
    '''
    str_to_credentials(s) yields (key, secret) if the given string is a valid representation of a
      set of credentials. Valid representations include '<key>:<secret>' and '<key>\n<secret>'. All
      initial and trailing whitespace is always stripped from both key and secret. If a newline
      appears in the string, then this character always takes precedence as the separator over a
      colon character. The given string may also be a json object, in which case it is parsed and
      considered valid if it is a form accepted by to_credentials (e.g., a mapping with 'key' and
      'secret' entries or a pair of strings).
    '''
    if not pimms.is_str(s):
        raise ValueError('str_to_credentials requires a string argument')
    s = s.strip()
    # First try a json object:
    try:
        js = json.loads(s)
        return to_credentials(js)
    except Exception:
        pass
    # must be '<key>\n<secret>' or '<key>:<secret>'
    dat = s.split('\n')
    if len(dat) == 1: dat = s.split(':')
    if len(dat) != 2:
        raise ValueError(
            'String "%s" does not appear to be a credentials file' % s)
    return tuple([q.strip() for q in dat])
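A quick usage sketch of the parsing rules above (the key/secret values are made up):

str_to_credentials('mykey:mysecret')        # => ('mykey', 'mysecret')
str_to_credentials(' mykey \n my:secret ')  # newline wins: => ('mykey', 'my:secret')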
Example #11
def to_credentials(arg):
    '''
    to_credentials(arg) converts arg into a pair (key, secret) if arg can be coerced into such a
      pair and otherwise raises an error.
    
    Possible inputs include:
      * A tuple (key, secret)
      * A mapping with the keys 'key' and 'secret'
      * The name of a file that can load credentials via the load_credentials() function
      * A string that separates the key and secret by ':', e.g., 'mykey:mysecret'
      * A string that separates the key and secret by a "\n", e.g., "mykey\nmysecret"
    '''
    if pimms.is_str(arg):
        try:
            return load_credentials(arg)
        except Exception:
            pass
        try:
            return str_to_credentials(arg)
        except Exception:
            raise ValueError(
                'String "%s" is neither a file containing credentials nor a valid'
                ' credentials string itself.' % arg)
    elif pimms.is_map(arg) and 'key' in arg and 'secret' in arg:
        return (arg['key'], arg['secret'])
    elif pimms.is_vector(arg, str) and len(arg) == 2:
        return tuple(arg)
    else:
        raise ValueError(
            'given argument cannot be coerced to credentials: %s' % arg)
Example #12
def import_stimulus(stim, gcf):
    '''
    import_stimulus(stim, gcf) yields the imported image for the given stimulus argument stim; stim
    may be either a filename or an image array; the argument gcf must be the gamma correction
    function.
    '''
    if pimms.is_str(stim):
        try:              from skimage.io   import imread as skload
        except Exception: from skimage.data import load   as skload
        im = np.asarray(skload(stim), dtype=float)
    else:
        im = np.asarray(stim, dtype=float)
    if len(im.shape) == 3:
        # average the color channels
        im = np.mean(im, axis=2)
    if len(im.shape) != 2:
        raise ValueError('images must be 2D or 3D matrices')
    # We need to make sure this image is between 0 and 1; if not, we assume it's between 0 and 255;
    # for now it seems safe to automatically detect this
    mx = np.max(im)
    if   not np.isclose(mx, 65535) and mx > 65535: im /= 4294967295.0
    elif not np.isclose(mx, 255) and mx > 255:     im /= 65535.0
    elif not np.isclose(mx, 1) and mx > 1:         im /= 255.0
    # apply the gamma-correction function, if one was given
    if gcf is not None: im = gcf(im)
    return im
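A standalone sketch of the intensity-normalization ladder above, which guesses the bit depth from the image's maximum value:

import numpy as np
im = np.asarray([[0., 128., 255.]])
mx = np.max(im)
if   not np.isclose(mx, 65535) and mx > 65535: im /= 4294967295.0  # 32-bit range
elif not np.isclose(mx, 255) and mx > 255:     im /= 65535.0       # 16-bit range
elif not np.isclose(mx, 1) and mx > 1:         im /= 255.0         # 8-bit range
print(im)  # [[0.         0.50196078 1.        ]]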
Example #13
def importer(name, extensions=None, sniff=None):
    '''
    @importer(name) is a decorator that declares that the following function is a file-loading
      function that should be registered with the neuropythy load function. See also the
      forget_importer function.

    Any importer function must take, as its first argument, a filename; after that it may take any
    number of keyword arguments, but no other non-keyword arguments. These keyword arguments can be
    passed to the neuropythy load function.
    
    The following options are accepted:
      * extensions (default: None) may be a string or a collection of strings that indicate possible
        file extensions for files of this type.
      * sniff (default: None) may optionally be a function f(s) that yields True when the given
        string s is a filename for a file of this type. If no sniff is given, this type can still
        be detected by running the importer and catching any raised exception.
    '''
    name = name.lower()
    if name in importers:
        raise ValueError(
            'An importer for type %s already exists; see forget_importer' %
            name)
    if extensions is None: extensions = ()
    elif pimms.is_str(extensions): extensions = (extensions,)
    else: extensions = tuple(extensions)

    def _importer(f):
        global importers
        importers = importers.set(name, (f, extensions, sniff))
        setattr(load, name, f)
        return f

    return _importer
Example #14
def _parse_field_argument(instruct, faces, edges, coords):
    _java = java_link()
    if pimms.is_str(instruct):
        insttype = instruct
        instargs = []
    elif hasattr(instruct, '__iter__'):
        insttype = instruct[0]
        instargs = instruct[1:]
    else:
        raise RuntimeError(
            'potential field instruction must be list/tuple-like or a string')
    # look this type up in the types data:
    insttype = insttype.lower()
    if insttype not in _parse_field_data_types:
        raise RuntimeError('Unrecognized field data type: ' + insttype)
    instdata = _parse_field_data_types[insttype]
    # if the data is a dictionary, we must parse on the next arg
    if pimms.is_map(instdata):
        shape_name = instargs[0].lower()
        instargs = instargs[1:]
        if shape_name not in instdata:
            raise RuntimeError('Shape ' + shape_name +
                               ' not supported for type ' + insttype)
        instdata = instdata[shape_name]
    # okay, we have a list of instructions... find the java method we are going to call...
    java_method = getattr(_java.jvm.nben.mesh.registration.Fields, instdata[0])
    # and parse the arguments into a list...
    java_args = [
        _parse_field_function_argument(a, instargs, faces, edges, coords)
        for a in instdata[1:]
    ]
    # and call the function...
    return java_method(*java_args)
Example #15
def to_nonempty(s):
    '''
    to_nonempty(s) yields s if s is a nonempty string and otherwise raises an exception.
    '''
    if not pimms.is_str(s) or s == '':
        raise ValueError('cannot convert object to non-empty string')
    return s
Example #16
def _load(pdir, flnm, loadfn, *argmaps, **kwargs):
    inst = pimms.merge(*(argmaps + (kwargs,)))
    flnm = flnm.format(**inst)
    args = pimms.merge(*argmaps, **kwargs)
    #logging.info('FileMap: loading file "%s"...\n' % flnm) #debug
    try:
        lpth = pdir.local_path(flnm)
        #logging.info('     ... local path: %s\n' % lpth) #debug
        loadfn = args['load'] if 'load' in args else loadfn
        #filtfn = args['filt'] if 'filt' in args else lambda x,y:x
        dat = loadfn(lpth, args)
        #dat = filtfn(dat, args)
    except Exception:
        dat = None
        #raise
    # check for miss instructions if needed
    if dat is None and 'miss' in args: miss = args['miss']
    else: miss = None
    if pimms.is_str(miss) and miss.lower() in ('error', 'raise', 'exception'):
        raise ValueError('File %s failed to load' % flnm)
    elif miss is not None:
        dat = miss(flnm, args)
    return dat
Example #17
def disk_projection_cmag(mesh, retinotopy='any', surface=None):
    '''
    disk_projection_cmag(mesh) yields the cortical magnification based on the projection of disks
      on the cortical surface into the visual field.
    '''
    # First, find the retino data
    if pimms.is_str(retinotopy):
        retino = retinotopy_data(mesh, retinotopy)
    else:
        retino = retinotopy
    # Convert from polar angle/eccen to longitude/latitude
    vcoords = np.asarray(as_retinotopy(retino, 'geographical'))
    # note the surface coordinates
    if surface is None:
        tess = mesh.tess
        scoords = mesh.coordinates
    elif pimms.is_str(surface):
        if not isinstance(mesh, mri.Cortex):
            raise ValueError(
                'named surfaces can only be used with cortex objects')
        surface = surface.lower()
        if surface in ['white']: mesh = mesh.white_surface
        elif surface in ['middle', 'midgray', 'mid-gray']:
            mesh = mesh.midgray_surface
        elif surface in ['pial']:
            mesh = mesh.pial_surface
        elif surface in mesh.surfaces:
            mesh = mesh.surfaces[surface]
        else:
            raise ValueError('Unrecognized surface: %s' % surface)
        scoords = mesh.coordinates
        tess = mesh.tess
    elif isinstance(surface, geo.Mesh):
        tess = mesh.tess
        scoords = surface.coordinates
        if scoords.shape[1] > tess.vertex_count:
            scoords = scoords[:, surface.tess.index(tess.labels)]
    else:
        raise ValueError('Could not understand surface option')
    faces = tess.indexed_faces
    # okay, we have the data organized into scoords, vcoords, and faces;
    # let's get sfx and vfx (surface face coords and visual face coords)
    (sfx, vfx) = [
        np.asarray([x[:, ii] for ii in faces]) for x in (scoords, vcoords)
    ]
    # TODO
    raise ValueError('Not yet implemented')
Example #18
def database(r):
    '''
    hcp.database is the database name for the HCP dataset object; by default this is
    'hcp-openaccess' or whatever value is configured in neuropythy.config.
    '''
    if r is Ellipsis or r is None: return config['hcp_auto_database']
    elif not pimms.is_str(r): raise ValueError('HCP database must be a string')
    else: return r
Example #19
def url(u):
    '''
    url is the URL from which the performance-fields data is loaded.
    '''
    if u is None or u is Ellipsis:
        return VisualPerformanceFieldsDataset.osf_url
    if not pimms.is_str(u): raise ValueError('url must be a string')
    return u
Example #20
def to_image(img, image_type=None, spec=None, **kwargs):
    '''
    to_image(array) yields a Nifti1Image of the given array with default meta-data spec.
    to_image(array, image_type) yields an image object of the given type; image_type may either be
      an image class or a class name (see supported types below).
    to_image((array, spec)) uses the given mapping of meta-data (spec) to construct the image-spec;
      note that spec may simply be an affine transformation matrix or may be an image.
    to_image((array, affine, spec)) uses the given affine specifically (the given affine
      overrides any affine included in the spec meta-data).
    to_image(imspec) constructs an image with the properties specified in the given imspec; the
      special optional argument fill (default: 0.0) can be set to something else to specify what the
      default cell value should be.

    Note that the array may optionally be an image itself, in which case its spec is used as a
    starting point for the new spec. Any spec-data passed as a tuple overwrites this spec-data,
    and any spec-data passed as an optional argument overwrites this spec-data in turn.

    The first optional argument is interpreted as an image type if possible, but if a spec-data
    mapping or equivalent (e.g., an image header or affine) is passed as the first argument, it
    is used as such instead; the next optional argument is named spec, and any additional keyword
    arguments passed to to_image are merged into this spec object left-to-right (i.e., keyword
    arguments overwrite the spec keys).

    If no affine is given and the image object given is an array, then a FreeSurfer-like transform
    that places the origin at the center of the image is used.
    '''
    # make sure we return unchanged if no change requested
    if is_image(img) and image_type is None and spec is None and len(kwargs) == 0: return img
    elif is_image_spec(img):
        fill = kwargs.pop('fill', 0.0)
        return to_image(image_spec_to_image(img, fill=fill),
                        image_type=image_type, spec=spec, **kwargs)
    # quick cleanup of args:
    # we have a variety of things that go into spec; in order (where later overwrites earlier):
    # (1) img spec, (2) image_type map (if not an image type) (3) spec, (4) kw args
    # see if image_type is actually an image type (might be a spec/image)...
    if pimms.is_str(image_type) or isinstance(image_type, type):
        (image_type, s2) = (to_image_type(image_type), {})
    else: 
        (image_type, s2) = (None, {} if image_type is None else to_image_spec(image_type))
    if image_type is None: image_type = image_types_by_name['nifti1']
    s3 = {} if spec is None else to_image_spec(spec)
    # okay, next, parse the image argument itself:
    if is_tuple(img):
        if   len(img) == 1: (img,aff,s1) = (img[0], None, {})
        elif len(img) == 2: (img,aff,s1) = (img[0], None, img[1])
        elif len(img) == 3: (img,aff,s1) = img
        else: raise ValueError('cannot parse more than 3 elements from image tuple')
        # check that the affine wasn't given as the meta-data (e.g. (img,aff) instead of (img,mdat))
        if aff is None and s1 is not None:
            try:    (aff, s1) = (to_affine(s1, 3), {})
            except Exception: pass
    else: (aff,s1) = (None, {})
    s0 = to_image_spec(img)
    spec = pimms.merge(s0, s1, s2, s3, kwargs)
    if aff is not None: spec = pimms.assoc(spec, affine=to_affine(aff, 3))
    # okay, we create the image now:
    return image_type.create(img, meta_data=spec)
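A hedged usage sketch of the call patterns documented above (array and type names are illustrative):

import numpy as np
arr = np.zeros((16, 16, 16))
im1 = to_image(arr)                # default Nifti1Image with a centered affine
im2 = to_image((arr, np.eye(4)))   # the (array, affine) tuple form
im3 = to_image(arr, 'nifti1')      # image type given by registered name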
Example #21
def to_subject_paths(paths):
    '''
    to_subject_paths(paths) accepts either a string that is a :-separated list of directories or
      a list of directories and yields a list of all the existing directories.
    '''
    if paths is None: return []
    if pimms.is_str(paths): paths = paths.split(':')
    paths = [os.path.expanduser(p) for p in paths]
    return [p for p in paths if os.path.isdir(p)]
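A quick usage sketch of the function above (the path values are illustrative):

to_subject_paths('/tmp:/no/such/dir')  # => ['/tmp'] on most systems
to_subject_paths(None)                 # => []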
Example #22
def cache_path(cp):
    '''
    pseudo_dir.cache_path is the optionally provided cache path; this is the same as the
    storage path unless this is None.
    '''
    if cp is None: return None
    if not pimms.is_str(cp):
        raise ValueError('cache_path must be a string')
    return os.path.expanduser(os.path.expandvars(cp))
Example #23
def _osf_exists(fls, osfbase, cache_path, path):
    cpath = os.path.join(cache_path, PseudoDir._url_to_ospath(path))
    if os.path.exists(cpath): return cpath
    fl = fls
    for pp in path.split('/'):
        if pimms.is_str(fl): return False
        elif pp in fl: fl = fl[pp]
        else: return False
    return True
Example #24
def cache_directory(cache_root, name, custom_directory):
    '''
    dataset.cache_directory is the directory in which the given dataset is cached.
    '''
    if custom_directory is not None: return custom_directory
    return os.path.join(cache_root,
                        (name    if pimms.is_str(name) else
                         name[0] if len(name) == 1     else
                         '%s_%x' % (name[0], hash(name[1:]))))
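A standalone illustration of the directory-name logic above; note that Python's string hashing is salted per process, so the hex suffix varies between runs (the dataset name is made up):

name = ('example_dataset', 'retinotopy', 1)
print('%s_%x' % (name[0], hash(name[1:])))  # e.g. example_dataset_5f2a...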
Example #25
def source_path(sp):
    '''
    pseudo_dir.source_path is the source path of the given pseudo-dir object.
    '''
    if sp is None: return os.path.join('/')
    if not pimms.is_str(sp):
        raise ValueError('source_path must be a string/path')
    if is_url(sp) or is_s3_path(sp): return sp
    return os.path.expanduser(os.path.expandvars(sp))
Example #26
def ensure_path(self, rpath, cpath):
    fl = self._find_url(rpath)
    if not pimms.is_str(fl):
        if not os.path.isdir(cpath): os.makedirs(cpath, mode=0o755)
        return cpath
    else:
        cdir = os.path.split(cpath)[0]
        if not os.path.isdir(cdir): os.makedirs(cdir, mode=0o755)
    return url_download(fl, cpath)
Example #27
def calc_labels(subject, hemisphere_tags, hemisphere_data, labels=None):
    '''
    calc_labels finds the available label data for the subject on which the retinotopy operations
    are being performed.

    Afferent parameters:
      @ labels 
        The filenames of the files containing label data for the subject's hemispheres. Label data
        can be provided in mgz, annot, or curv files containing visual area labels, one per vertex.
        The labels argument may be specified as a comma-separated list of filenames (in the same
        order as the hemispheres, which are lh then rh by default) or as a single template filename
        that may contain the character * as a stand-in for the hemisphere tag. For example,
        '/data/*.v123_labels.mgz' would look for the file /data/lh.v123_labels.mgz for the 'lh'
        hemisphere and for /data/rh_LR32k.v123_labels.mgz for the 'rh_LR32k' hemisphere.
        Note that labels are not required--without labels, no field-sign minimization is performed,
        so retinotopic cleaning may be less reliable. Note that additionally, labels may be
        preceded by the relevant tag; so instead of '/data/*.v123_labels.mgz' with, as in the 
        example, hemispheres 'lh,rh_LR32k', one could use the arguments
        'lh:/data/lh.v123_labels.mgz,rh:/data/rh_LR32k.v123_labels.mgz' (for labels) and
        'lh,rh:rh_LR32k' for hemispheres.
    '''
    lbls = {}
    # no labels argument--this is fine; no labels are used
    if labels is None: return {'label_data': pyr.m()}
    if not pimms.is_str(labels):
        raise ValueError('could not understand non-string labels')
    # first, it might just be a template pattern
    fls = {}
    if '*' in labels:
        sparts = labels.split('*')
        for h in hemisphere_tags:
            flnm = h.join(sparts)
            fls[h] = os.path.expanduser(os.path.expandvars(flnm))
    else:
        # okay, separate commas...
        lsplit = labels.split(',')
        for (k, l) in enumerate(lsplit):
            if ':' in l: (tag, name) = l.split(':')
            elif k < len(hemisphere_tags):
                (tag, name) = (hemisphere_tags[k], l)
            else:
                raise ValueError('could not match labels to hemispheres')
            if tag not in hemisphere_data:
                raise ValueError('Tag %s (in labels arg) does not exist' %
                                 (tag, ))
            fls[tag] = os.path.expanduser(os.path.expandvars(name))
    for (tag, name) in six.iteritems(fls):
        if not os.path.isfile(name):
            raise ValueError('Labels filename %s not found' % (name, ))
        hem = hemisphere_data[tag]
        tmp = nyio.load(name)
        if not pimms.is_vector(tmp) or len(tmp) != hem.vertex_count:
            raise ValueError('Labels file %s does not contain label data' %
                             (name, ))
        lbls[tag] = np.asarray(tmp)
    return {'label_data': pimms.persist(lbls)}
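A standalone sketch of the '*' template expansion described in the docstring above:

labels = '/data/*.v123_labels.mgz'
sparts = labels.split('*')
print({h: h.join(sparts) for h in ('lh', 'rh_LR32k')})
# => {'lh': '/data/lh.v123_labels.mgz', 'rh_LR32k': '/data/rh_LR32k.v123_labels.mgz'}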
Example #28
def to_name(nm):
    '''
    Dataset.to_name(name) yields a valid dataset name equivalent to the given name or raises an
      error if name is not valid. In order to be valid, a name must be either a string or a
      tuple of numbers and strings that begins with a string.
    '''
    if pimms.is_str(nm): return nm
    if not pimms.is_vector(nm):
        raise ValueError('name must be a string or tuple')
    if len(nm) < 1:
        raise ValueError(
            'names that are tuples must have at least one element')
    if not pimms.is_str(nm[0]):
        raise ValueError('names that are tuples must begin with a string')
    if not all(pimms.is_str(x) or pimms.is_number(x) for x in nm):
        raise ValueError(
            'dataset names that are tuples must contain only strings and numbers')
    return tuple(nm)
Example #29
def custom_directory(d):
    '''
    dataset.custom_directory is None if no custom directory was provided for the given dataset;
      otherwise it is the provided custom directory.
    '''
    if d is None: return None
    if not pimms.is_str(d):
        raise ValueError('custom_directory must be a string')
    else:
        return d
Example #30
def label(l):
    '''
    prf.label is the visual area label of the pRF.
    '''
    if pimms.is_str(l):
        return l.lower()
    elif not pimms.is_int(l) or l < 0:
        raise ValueError('labels must be non-negative integers or strings')
    else:
        return l