def _generate_subject_DROI_details(subjects,
                                       sid,
                                       h,
                                       eccentricity_range=None,
                                       angles=None,
                                       min_variance_explained=0,
                                       method=None):
        from neuropythy.util import curry
        import six, pyrsistent as pyr, numpy as np
        paradigm_order = ['dorsal', 'ventral', 'vertical', 'horizontal']
        roi_eccens = VisualPerformanceFieldsDataset.roi_eccens
        roi_angles = VisualPerformanceFieldsDataset.roi_angles
        if angles is None or angles is Ellipsis:
            angles = roi_angles
        e = eccentricity_range
        if e is None or e is Ellipsis:
            e = list(VisualPerformanceFieldsDataset.roi_eccens)
        if pimms.is_list(e) and all(pimms.is_tuple(q) for q in e):
            f = VisualPerformanceFieldsDataset._generate_subject_DROI_details
            res = [f(subjects, sid, h, eccentricity_range=q) for q in e]
            q = res[0]

            def _merge(p, a):
                r = {}
                for k in six.iterkeys(q[p][a]):
                    u = [u[p][a][k] for u in res if len(u[p][a][k]) > 0]
                    if len(u) == 0: u = np.asarray([], dtype=float)
                    else: u = np.concatenate(u)
                    r[k] = u
                return pyr.pmap(r)

            return pyr.pmap({
                k: pimms.lmap({a: curry(_merge, k, a)
                               for a in angles})
                for k in paradigm_order
            })
        f0 = VisualPerformanceFieldsDataset._generate_subject_DROI_data
        f = lambda sid, h, k: f0(subjects[sid],
                                 h,
                                 k,
                                 eccentricity_range=e,
                                 results='all',
                                 min_variance_explained=min_variance_explained,
                                 method=method)
        lm0 = pimms.lmap({k: curry(f, sid, h, k) for k in angles})
        pfn = lambda p: pimms.lmap(
            {k: curry(lambda k: lm0[k][p], k)
             for k in angles})
        return pimms.lmap({p: curry(pfn, p) for p in paradigm_order})
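# A minimal, self-contained sketch (toy data, not dataset code) of the merge step
# used in the list-of-ranges branch above: details are computed once per
# eccentricity range, then the per-key arrays are concatenated across the ranges.
import numpy as np
per_range = [{'distance': np.array([1.0, 2.0])},
             {'distance': np.array([])},
             {'distance': np.array([3.0])}]
merged = {k: (np.concatenate([r[k] for r in per_range if len(r[k]) > 0])
              if any(len(r[k]) > 0 for r in per_range)
              else np.asarray([], dtype=float))
          for k in per_range[0].keys()}
# merged['distance'] -> array([1., 2., 3.])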
 def generate_DROI_details(self,
                           eccentricity_range=None,
                           angles=None,
                           min_variance_explained=0,
                           method=None):
     '''
     generate_DROI_details() yields a nested map of lazily computed DROI detail analyses; these
     analyses are used to generate the DROI table(s).
     '''
     import six
     from neuropythy.util import curry
     f = curry(
         VisualPerformanceFieldsDataset._generate_subject_DROI_details,
         self.subjects)
     m = {
         sid: pimms.lmap({
             h: curry(f,
                      sid,
                      h,
                      eccentricity_range=eccentricity_range,
                      angles=angles,
                      min_variance_explained=min_variance_explained,
                      method=method)
             for h in ['lh', 'rh']
         })
         for sid in six.iterkeys(self.subjects)
     }
     return pimms.persist(m)
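# Illustrative, self-contained sketch (toy stand-ins, not dataset code) of the
# structure returned above: a persistent map over subjects whose values are lazy
# maps over hemispheres, so per-hemisphere work only runs when it is requested.
import pimms
from neuropythy.util import curry

def _toy_details(sid, h):
    print('computing details for %s / %s' % (sid, h))
    return {'note': 'stand-in for the real per-hemisphere DROI details'}

toy_droi = pimms.persist({sid: pimms.lmap({h: curry(_toy_details, sid, h)
                                           for h in ['lh', 'rh']})
                          for sid in ['sub01', 'sub02']})
_ = toy_droi['sub01']['lh']   # only this subject/hemisphere is actually computed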
 def subjects(inferred_maps, boundary_distances, subject_list):
     '''
     subjects is a dictionary of subject objects for all subjects used in the visual performance
     fields dataset. All subject objects in the subjects dict include property data on the native
     hemispheres for inferred retinotopic maps and for V1 boundary distances.
     '''
     from neuropythy.util import curry
     f = VisualPerformanceFieldsDataset._generate_subject
     return pimms.lmap({
         sid: curry(f, sid, inferred_maps, boundary_distances)
         for sid in subject_list
     })
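# A small runnable sketch of the lazy-map behavior relied on above (the loader is
# a toy stand-in; the real map holds neuropythy subject objects): each value is
# computed on first access and cached thereafter.
import pimms
from neuropythy.util import curry

def _toy_load_subject(sid):
    print('loading subject %s' % (sid, ))
    return {'id': sid}

toy_subjects = pimms.lmap({sid: curry(_toy_load_subject, sid)
                           for sid in ['sub01', 'sub02']})
s1 = toy_subjects['sub01']   # the loader prints once here
s2 = toy_subjects['sub01']   # cached; the loader does not run again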
 def asmap(self):
     check_path = self.options['check_path']
     # search through the subjects in this directory
     res = {}
     for sub in os.listdir(self.path):
         if self.bids and sub.startswith('sub-'):
              name = sub[4:]
         else:
             name = sub
         sdir = os.path.join(self.path, sub)
         if not check_path or is_hcp_subject_path(sdir):
             res[name] = curry(self._get_subject, sdir, name)
     return pimms.lmap(res)
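# Sketch of the BIDS-prefix handling used in the directory scan above, with
# hypothetical directory names and no filesystem access (the real method also
# filters candidates with is_hcp_subject_path when check_path is set):
bids = True
dirnames = ['sub-01', 'sub-02', 'notes']
names = {(d[4:] if bids and d.startswith('sub-') else d): d for d in dirnames}
# names == {'01': 'sub-01', '02': 'sub-02', 'notes': 'notes'}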
 def DROI_details(subjects):
     '''
     DROI_details is a nested-dictionary structure of the various DROI details of each subject
     and hemisphere.
     '''
     import neuropythy as ny, os, six
     from neuropythy.util import curry
     f = curry(
         VisualPerformanceFieldsDataset._generate_subject_DROI_details,
         subjects)
     m = {
         sid: pimms.lmap({h: curry(f, sid, h)
                          for h in ['lh', 'rh']})
         for sid in six.iterkeys(subjects)
     }
     return pimms.persist(m)
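# Toy sketch (illustrative data only) of turning nested paradigm -> angle ->
# property-array details, like those built above, into long-format rows, which is
# the kind of flattening from which the DROI tables below could be generated:
import numpy as np
toy_nested = {'ventral': {10: {'distance': np.array([0.5, 1.5])}}}
rows = [dict(paradigm=p, angle=a, **{k: v[i] for (k, v) in arrs.items()})
        for (p, angs) in toy_nested.items()
        for (a, arrs) in angs.items()
        for i in range(len(next(iter(arrs.values()))))]
# rows == [{'paradigm': 'ventral', 'angle': 10, 'distance': 0.5},
#          {'paradigm': 'ventral', 'angle': 10, 'distance': 1.5}]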
    def DROI_tables(subject_list, pseudo_path):
        '''
        DROI_tables (distance-based regions of interest) is a dictionary of ROIs used in the visual
        performance fields project. DROI_tables[sid] is a dataframe of the ROI data for the subject
        with ID sid.
        '''
        import neuropythy as ny, os, six

        # Load one subject.
        def _load_DROI(sid):
            # get a subject-specific cache_path
            cpath = pseudo_path.local_path('DROIs', '%s.csv' % (sid, ))
            return ny.load(cpath)

        return pimms.lmap(
            {sid: ny.util.curry(_load_DROI, sid)
             for sid in subject_list})
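# Toy sketch of combining per-subject tables, like those loaded lazily above,
# into one dataframe (pandas is assumed to be available; the data here are
# illustrative, not the cached DROI CSVs):
import pandas as pd
toy_tables = {'sub01': pd.DataFrame({'sid': ['sub01'], 'distance': [0.5]}),
              'sub02': pd.DataFrame({'sid': ['sub02'], 'distance': [1.5]})}
full_table = pd.concat(list(toy_tables.values()), ignore_index=True)
# full_table has one row per subject entry across all subjects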
    def boundary_distances(pseudo_path, subject_list, inferred_maps):
        '''
        boundary_distances is a nested-dictionary structure containing distances between
        each vertex and a V1 boundary. If x is boundary_distances[sid][h][b][k] then x is
        the distance between the k'th vertex and boundary b ("ventral", "dorsal", or
        "horizontal") in the h hemisphere ("lh" or "rh") of the subject with ID sid.
        '''
        import os, six
        from neuropythy.util import curry
        from neuropythy import load

        def _load_distances(sid, h):
            flnm = pseudo_path.local_path('distances', '%s_%s.mgz' % (sid, h))
            (v, d, hz) = load(flnm).T
            return pimms.persist({'ventral': v, 'dorsal': d, 'horizontal': hz})

        return pimms.persist({
            sid: pimms.lmap(
                {h: curry(_load_distances, sid, h)
                 for h in ['lh', 'rh']})
            for sid in subject_list
        })
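# Toy sketch of the column unpacking used above: each distances file holds one
# row per vertex with three columns (ventral, dorsal, horizontal), so loading and
# transposing yields one per-vertex distance array per boundary (data shown here
# are illustrative):
import numpy as np
mat = np.array([[0.1, 0.2, 0.3],
                [0.4, 0.5, 0.6]])   # 2 vertices x 3 boundaries
(v, d, hz) = mat.T
# v == array([0.1, 0.4]); d == array([0.2, 0.5]); hz == array([0.3, 0.6])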
    def inferred_maps(pseudo_path, subject_list):
        '''
        inferred_maps is a nested-dictionary structure containing the retinotopic maps inferred by
        using Bayesian inference on the retinotopic maps of the subjects in the HCP 7T Retinotopy
        Dataset.
        '''
        import os, six
        from neuropythy.util import curry
        from neuropythy import load
        inffiles = VisualPerformanceFieldsDataset.inferred_map_files

        def _load_infmaps(sid, h, patt):
            flnm = pseudo_path.local_path('inferred_maps', patt % (sid, h))
            return load(flnm)

        return pimms.persist({
            sid: {
                h: pimms.lmap({('inf_' + k): curry(_load_infmaps, sid, h, v)
                               for (k, v) in six.iteritems(inffiles)})
                for h in ['lh', 'rh']
            }
            for sid in subject_list
        })
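# Illustrative sketch of how the lazy entries above construct filenames and
# property names; the pattern string here is hypothetical (the real patterns come
# from VisualPerformanceFieldsDataset.inferred_map_files):
toy_patt = '%s.%s.inf_angle.mgz'
toy_flnm = toy_patt % (111312, 'lh')   # -> '111312.lh.inf_angle.mgz'
toy_prop = 'inf_' + 'angle'            # key under which the loaded map is stored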
def calc_images(subject, atlas_properties, image_template, worklog):
    '''
    calc_images is a calculator that converts the atlas properties into a similar nested map of
    atlas images.

    Afferent parameters:
      @ image_template 
        The image template object or file to be used as a template for the output image. This
        can be None (in which case 'brain' is used), a string that identifies an image of the
        subject, or a valid filename of an image.

    Efferent values:
      @ atlas_images 
        A nested lazy-map structure that contains the 3D images for each of the relevant atlases
        and properties.
    '''
    from neuropythy import image_clear
    # parse the image template; remember the original argument for error messages
    image_template_arg = image_template
    if image_template is None: image_template = subject.images['brain']
    elif pimms.is_str(image_template):
        if image_template in subject.images:
            image_template = subject.images[image_template]
        else:
            try:
                image_template = ny.load(image_template, to='image')
            except Exception:
                image_template = None
    if image_template is None:
        raise ValueError('Could not load or find image template: %s' %
                         (image_template_arg, ))
    image_template = image_clear(image_template)
    # having the image template, make the addresses:
    hemis = list(
        six.iterkeys(
            next(six.itervalues(next(six.itervalues(atlas_properties))))))
    addr = pimms.lmap({
        h: curry(lambda h: subject.hemis[h].image_address(image_template), h)
        for h in hemis
    })
    worklog('Preparing images...')

    def _make_images(vd, m):
        worklog('Constructing %s images...' % (m, ))
        dat = {h: vd[h][m] for h in six.iterkeys(vd)}
        lk = next(
            iter(
                sorted(k for k in six.iterkeys(dat)
                       if k.lower().startswith('lh'))), None)
        rk = next(
            iter(
                sorted(k for k in six.iterkeys(dat)
                       if k.lower().startswith('rh'))), None)
        idat = (dat[lk] if lk is not None else None,
                dat[rk] if rk is not None else None)
        if lk is None and rk is None: return None  # no hemisphere data for this property
        aa = (addr.get(lk, None), addr.get(rk, None))
        im = subject.cortex_to_image(idat,
                                     image_template,
                                     hemi=(lk, rk),
                                     address=aa)
        return im

    ims = {
        atl: {
            v: pimms.lmap(
                {m: curry(_make_images, vd, m)
                 for m in six.iterkeys(hd)})
            for (v, vd) in six.iteritems(pps)
            for hd in [next(six.itervalues(vd))]
        }
        for (atl, pps) in six.iteritems(atlas_properties)
    }
    return {
        'atlas_images': pimms.persist(ims),
        'image_template_object': image_template
    }
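# calc_images follows the pimms "calculation" convention that the Afferent /
# Efferent docstring sections above refer to. A minimal, self-contained sketch of
# that convention with a toy calculation (not the atlas pipeline itself):
import pimms

@pimms.calc('doubled')
def calc_doubled(x):
    '''
    Afferent parameters:
      @ x The input number.

    Efferent values:
      @ doubled Twice the input.
    '''
    return {'doubled': 2 * x}

toy_plan = pimms.plan(double_step=calc_doubled)
toy_result = toy_plan(x=21)
# toy_result['doubled'] == 42; efferent values are computed lazily on access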