Example #1
def parameters(params):
    '''
    rsp.parameters is a pimms.itable object of the divisive normalization parameters (each row
    of the itable is one parameterization) used in the calculation for the image array response
    object rsp. The parameters may be given to a response object as a list of maps or as an
    itable.
    '''
    if pimms.is_itable(params):
        return params
    elif pimms.is_map(params):
        tbl = pimms.itable(params)
        # we want this to fail if it can't be transformed to rows
        try:
            tbl.rows
        except Exception:
            raise ValueError('map could not be cast to itable')
        return tbl
    else:
        tbl = {}
        try:
            p0 = params[0]
            tbl = {k: [v] for (k, v) in six.iteritems(p0)}
            for p in params[1:]:
                if len(p) != len(p0): raise ValueError()
                for (k, v) in six.iteritems(p):
                    tbl[k].append(v)
            tbl = pimms.itable(tbl)
            tbl.rows
        except Exception:
            raise ValueError(
                'parameters must be an itable, a map of columns, or a list of '
                + 'parameter maps')
        return tbl
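For reference, a minimal usage sketch of the three accepted forms; the column names here are borrowed from Example #4's parameters and are illustrative only (this assumes pimms and six are importable):

import pimms

# 1. an itable is returned unchanged
tbl = parameters(pimms.itable({'saturation_constant': [0.5, 0.5],
                               'divisive_exponent':   [2.0, 1.5]}))
# 2. a map of columns is cast to an itable (and must be castable to rows)
tbl = parameters({'saturation_constant': [0.5, 0.5],
                  'divisive_exponent':   [2.0, 1.5]})
# 3. a list of parameter maps is transposed into columns
tbl = parameters([{'saturation_constant': 0.5, 'divisive_exponent': 2.0},
                  {'saturation_constant': 0.5, 'divisive_exponent': 1.5}])
tbl.rows  # -> one parameter map per row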
Example #2
def v123_table(cache_directory, subjects):
    '''
    dataset.v123_table is a pimms ITable object for the BensonWinawer2018Dataset; the table
    contains all relevant pRF data for all cortical surface vertices in the 8 subjects included
    in the paper Benson and Winawer (2018).
    '''
    # First, see if there's a cache file
    cachefl = os.path.join(cache_directory, 'v123_table.p')
    if os.path.isfile(cachefl):
        try: return pimms.load(cachefl)
        except Exception:
            msg = 'neuropythy: Could not load existing v123_table cache file: %s' % cachefl
            warnings.warn(msg)
    # go through, building up arrays of arrays that we will concatenate at the end
    data = AutoDict()
    data.on_miss = lambda: []  # we want it to auto-produce lists...
    # non-retinotopy props we want to add to the data...
    props = ['midgray_surface_area', 'pial_surface_area', 'white_surface_area', 'label']
    for (sid, sub) in six.iteritems(subjects):
        for hname in ['lh', 'rh']:
            hemi = sub.hemis[hname]
            for (dskey, dsdata) in six.iteritems(BensonWinawer2018Dataset.prf_meta_data):
                dsid = 99 if dskey == 'prf' else int(dskey[3:])
                # okay, let's get the raw data we need to process...
                ang = hemi.prop(dskey + '_polar_angle')
                ecc = hemi.prop(dskey + '_eccentricity')
                # and the inferred data...
                iang = hemi.prop('inf-' + dskey + '_polar_angle')
                iecc = hemi.prop('inf-' + dskey + '_eccentricity')
                ilbl = hemi.prop('inf-' + dskey + '_visual_area')
                # process both of these (get x/y basically)
                (x, y)  = as_retinotopy({'polar_angle': ang,  'eccentricity': ecc},
                                        'geographical')
                (ix, iy) = as_retinotopy({'polar_angle': iang, 'eccentricity': iecc},
                                         'geographical')
                # find the relevant vertices
                ii = np.where((iecc < 12) & np.sum([ilbl == k for k in (1, 2, 3)], axis=0))[0]
                # now add the relevant properties...
                for p in props: data[p].append(hemi.prop(p)[ii])
                for p0 in ['polar_angle', 'eccentricity', 'radius', 'variance_explained']:
                    p = dskey + '_' + p0
                    data[p0].append(hemi.prop(p)[ii])
                for (p, u) in zip(['x', 'y'], [x, y]):
                    data[p].append(u[ii])
                for p0 in ['_polar_angle', '_eccentricity', '_radius', '_visual_area']:
                    p = 'inf-' + dskey + p0
                    data['inf' + p0].append(hemi.prop(p)[ii])
                for (p0, u) in zip(['inf_x', 'inf_y'], [ix, iy]):
                    data[p0].append(u[ii])
                # we also want repeated properties for some things
                extras = {'subject': sid, 'hemi': hname, 'dataset_id': dsid, 'dataset_name': dskey}
                for (p, v) in six.iteritems(extras): data[p].append(np.full(len(ii), v))
    # concatenate everything
    data = pimms.itable({k: np.concatenate(v) for (k, v) in six.iteritems(data)})
    if not os.path.isfile(cachefl):
        # try to write out the cache file
        try: pimms.save(cachefl, data)
        except Exception: pass
    return data
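Aside: the AutoDict with on_miss = lambda: [] is just a map that auto-produces an empty list for each missing key; the same accumulate-then-concatenate pattern can be sketched with a plain collections.defaultdict (a minimal illustration, not the neuropythy implementation):

import numpy as np
from collections import defaultdict

data = defaultdict(list)               # each missing key yields an empty list
for chunk_id in range(3):              # stand-in for the subject/hemisphere loops
    values = np.random.rand(10)
    keep = np.where(values < 0.5)[0]   # stand-in for the eccentricity/label mask
    data['value'].append(values[keep])
    data['chunk'].append(np.full(len(keep), chunk_id))
# concatenate each column once at the end; all columns end up the same length
table = {k: np.concatenate(v) for (k, v) in data.items()}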
Example #3
def tess(faces, cortical_coordinates, visual_coordinates,
         polar_angles, eccentricities, cleaned_visual_areas):
    'mdl.tess is the tesselation object for the mesh model.'
    props = pimms.itable({'polar_angle':  polar_angles,
                          'eccentricity': eccentricities,
                          'visual_area':  cleaned_visual_areas,
                          'cortical_coordinates': cortical_coordinates.T,
                          'visual_coordinates':   visual_coordinates.T})
    if isinstance(faces, geo.Tesselation): return faces.copy(properties=props)
    return geo.Tesselation(faces, properties=props).persist()
Example #4
def calc_divisive_normalization(labels,
                                saturation_constants_by_label,
                                divisive_exponents_by_label,
                                divisive_normalization_schema='Heeger1992'):
    '''
    calc_divisive_normalization is a calculator that prepares the divisive normalization function
    to be run in the sco pipeline. It gathers parameters into a pimms itable (such that each row
    is a map of the parameters for each pRF in the pRFs list), which is returned as the value
    'divisive_normalization_parameters'; it also adds a 'divisive_normalization_function' that
    is appropriate for the parameters given. In the case of this implementation, the parameters
    saturation_constant and divisive_exponent are extracted from the afferent parameters
    saturation_constants_by_label and divisive_exponents_by_label, and the function
    sco.impl.benson17.divisively_normalize_Heeger1992 is used.

    Required afferent parameters:
      * labels
      * saturation_constants_by_label Must be a map whose keys are label values and whose values are
        the saturation constant for the particular area; all values appearing in the pRF labels
        must be found in this map.
      * divisive_exponents_by_label Must be a map whose keys are label values and whose values are
        the divisive normalization exponent for that particular area; all values appearing in the
        pRF labels must be found in this map.

    Optional afferent parameters:
      @ divisive_normalization_schema specifies the kind of divisive normalization to perform;
        currently recognized values are 'Heeger1992' (the default), 'naive', 'sfreq', 'square',
        and 'Heeger1992_square'; any other value is passed through unchanged as the name of the
        normalization function.

    Provided efferent values:
      @ divisive_normalization_parameters Will be an ITable whose columns correspond to the
        divisive normalization formula's saturation constant and exponent; the rows will correspond
        to the pRFs.
      @ divisive_normalization_function Will be a function compatible with the
        divisive_normalization_parameters data-table; currently this is
        sco.impl.benson17.divisively_normalize_Heeger1992.
    '''
    sat = sco.util.lookup_labels(labels, saturation_constants_by_label)
    rxp = sco.util.lookup_labels(labels, divisive_exponents_by_label)
    tr = {
        'heeger1992': '.divisively_normalize_Heeger1992',
        'naive': '.divisively_normalize_naive',
        'sfreq': '.divisively_normalize_spatialfreq',
        'square': '.divisively_normalize_Heeger1992_square',
        'heeger1992_square': '.divisively_normalize_Heeger1992_square'
    }
    dns = divisive_normalization_schema.lower()
    return (pimms.itable(saturation_constant=sat, divisive_exponent=rxp),
            (__name__ + tr[dns]) if dns in tr else dns)
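The requirement that every label appear in the *_by_label maps is enforced by sco.util.lookup_labels, which in essence maps each pRF's label through the dictionary to build a per-pRF column. A minimal sketch of the idea (a hypothetical reimplementation for illustration, not sco's actual function):

import numpy as np

def lookup_labels_sketch(labels, values_by_label):
    # one value per pRF: each label is looked up in the map; a missing label
    # is an error, matching the requirement in the docstring above
    try:
        return np.asarray([values_by_label[lbl] for lbl in labels])
    except KeyError as e:
        raise ValueError('label %s not found in value map' % (e.args[0],))

labels = [1, 1, 2, 3]
sat = lookup_labels_sketch(labels, {1: 0.5, 2: 0.5, 3: 0.4})  # -> array([0.5, 0.5, 0.5, 0.4])
rxp = lookup_labels_sketch(labels, {1: 2.0, 2: 2.0, 3: 1.5})  # -> array([2. , 2. , 2. , 1.5])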
Example #5
    def test_itable(self):
        '''
        test_itable() tests pimms itable objects and makes sure they work correctly.
        '''
        class nloc:
            lazy_loads = 0

        def _load_lazy():
            nloc.lazy_loads += 1
            return pimms.quant(np.random.rand(10), 'sec')

        dat = pimms.lazy_map({
            'a': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
            'b': pimms.quant(np.random.rand(10), 'mm'),
            'c': ['abc', 'def', 'ghi', 'jkl', 'mno', 'pqr', 'stu', 'vwx', 'yz!', '!!!'],
            'd': _load_lazy})
        tbl = pimms.itable(dat)
        # make sure the data is the right size
        for k in tbl.keys():
            self.assertTrue(tbl[k].shape == (10, ))
        self.assertTrue(tbl.row_count == 10)
        self.assertTrue(len(tbl.rows) == 10)
        self.assertTrue(len(tbl.column_names) == 4)
        self.assertTrue(len(tbl.columns) == 4)
        # Check a few of the entries
        for (i, ki) in zip(np.random.randint(0, tbl.row_count, 50),
                           np.random.randint(0, 4, 50)):
            ki = tbl.column_names[ki]
            self.assertTrue(tbl.rows[i][ki] == tbl[ki][i])
        self.assertTrue(nloc.lazy_loads == 1)
        # see if we can discard stuff
        self.assertTrue('a' in tbl)
        self.assertFalse('a' in tbl.discard('a'))
        self.assertFalse('b' in tbl.discard('b'))
        self.assertFalse('c' in tbl.discard('b').discard('c'))
        self.assertTrue('c' in tbl.discard('b').discard('a'))
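The nloc.lazy_loads assertion above is the point of the lazy column: pimms.lazy_map defers a zero-argument function until its key is first accessed and caches the result afterward. A minimal standalone sketch of that behavior (assuming only that pimms is installed):

import pimms

class counter:
    calls = 0

def expensive():
    counter.calls += 1
    return sum(range(1000))

m = pimms.lazy_map({'cheap': 1, 'costly': expensive})
assert counter.calls == 0   # nothing has been computed yet
_ = m['costly']             # first access triggers the load
_ = m['costly']             # the cached value is reused
assert counter.calls == 1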
Example #6
def test_stimuli(stimulus_directory,
                 max_eccentricity=10.0,
                 pixels_per_degree=6.4,
                 orientations=None,
                 spatial_frequencies=None,
                 contrasts=None,
                 densities=None,
                 base_density=10,
                 base_contrast=0.75,
                 base_spatial_frequency=0.8,
                 base_orientation=0):
    '''
    test_stimuli(stimulus_directory) creates a variety of test png files in the given directory.
      The images' metadata, including a 'filename' column listing the created files, is returned
      as a pimms ITable. The images created are as follows:
      * A blank gray image
      * A variety of sinusoidal gratings; these vary in terms of:
        * contrast
        * spatial frequency
        * orientation
      * A variety of modulated sinusoidal gratings; in which the modulated spatial frequency is
        held at 1 cycle / degree and the modulated contrast is as high as possible; these vary in
        terms of:
        * contrast
        * spatial frequency
        * orientation
        * modulated orientation

    The following options may be given:
      * max_eccentricity (default: 10.0) specifies the maximum eccentricity in degrees to include in
        the generated images.
      * pixels_per_degree (default: 6.4) specifies the pixels per degree of the created images.
      * orientations (default: None) specifies the orientations (NOTE: in degrees) of the various
        gratings generated; if None, then uses [0, 30, 60, 90, 120, 150].
      * spatial_frequencies (default: None) specifies the spatial frequencies to use; by default uses
        a set of 5 spatial frequencies that depend on the resolution specified by pixels_per_degree.
      * contrasts (default: None) specifies the contrasts of the images to make; if None, then uses
        [0.25, 0.5, 0.75, 1.0].
      * densities (default: None) specifies the noise-pattern densities to use; if None, then uses
        [3, 6, 12, 24, 48].
    '''
    import skimage.io, warnings
    # Process/organize arguments
    maxecc = pimms.mag(max_eccentricity, 'deg')
    d2p = pimms.mag(pixels_per_degree, 'px/deg')
    sdir = stimulus_directory
    thetas = np.arange(0, 180, 22.5) if orientations is None else np.asarray(orientations)
    sfreqs = (d2p/4) * (2**np.linspace(-4.0, 0.0, 5)) if spatial_frequencies is None else \
             pimms.mag(spatial_frequencies, 'cycles/degree')
    ctsts = [0.25, 0.5, 0.75, 1.0] if contrasts is None else contrasts
    dnsts = [3, 6, 12, 24, 48] if densities is None else densities
    cpd0 = pimms.mag(base_spatial_frequency, 'cycles/degree')
    th0 = pimms.mag(base_orientation, 'deg') * np.pi / 180
    ct0 = base_contrast
    dn0 = base_density
    # go ahead and setup x and y values (in degrees) for the images
    dim = np.round(d2p * 2.0 * maxecc)
    center = 0.5 * (dim - 1)  # in pixels
    # x/y in pixels
    x = np.arange(0, dim, 1)
    # x/y in degrees
    x = (x - center) / d2p
    # mesh grid...
    (x, y) = np.meshgrid(x, x)
    # how we save images
    flnms = []
    fldat = []

    def _imsave(nm, im):
        flnm = os.path.join(sdir, nm + '.png')
        flnms.append(flnm)
        return skimage.io.imsave(flnm, im)

    def _immeta(meta, **kw):
        meta = pimms.merge(meta, kw)
        fldat.append(meta)
        return meta

    # have to catch the UserWarnings for low contrast images
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', category=UserWarning)
        # Generate the basic sin-grating-based images
        # First we want varying orientation, all at cpd=1
        meta = {'cpd': cpd0, 'contrast': ct0, 'type': 'sin_orientation', 'theta': th0}
        for theta_deg in sorted(thetas):
            theta = np.pi / 180 * theta_deg
            im = 0.5 * (1 + np.sin((np.cos(theta) * x - np.sin(theta) * y) * 2 * np.pi * cpd0))
            if ct0 < 1: im = (im - 0.5) * ct0 + 0.5
            _imsave('sin[theta=%06.3f]' % theta, im)
            _immeta(meta, theta=theta)
        # okay, now for theta=0 and variable cpd
        for cpd in reversed(sorted(sfreqs)):
            im = 0.5 * (1 + np.sin((np.cos(th0) * x - np.sin(th0) * y) * 2 * np.pi * cpd))
            if ct0 < 1: im = (im - 0.5) * ct0 + 0.5
            # write it out
            _imsave('sin[cpd=%06.3f]' % cpd, im)
            _immeta(meta, cpd=cpd, type='sin_frequency')
        # Now we can look at the requested contrasts
        for ct in sorted(ctsts):
            # save the simple grating first
            im = 0.5 * (1 + np.sin((np.cos(th0) * x - np.sin(th0) * y) * 2 * np.pi * cpd0))
            if ct < 1: im = (im - 0.5) * ct + 0.5
            _imsave('sin[contrast=%06.3f]' % ct, im)
            _immeta(meta, contrast=ct, type='sin_contrast')
        # Next, plaids; these are relatively easy:
        x0 = np.cos(th0) * x - np.sin(th0) * y
        y0 = np.sin(th0) * x + np.cos(th0) * y
        im0 = 0.25 * (2 + np.sin(x0 * 2 * np.pi * cpd0) + np.sin(y0 * 2 * np.pi * cpd0))
        im0 = (im0 - 0.5) / np.max(np.abs(im0 - 0.5)) + 0.5
        for ct in sorted(ctsts):
            if ct == 0: continue
            if ct < 1: im = (im0 - 0.5) * ct + 0.5
            else: im = im0
            _imsave('plaid[contrast=%06.3f]' % ct, im)
            _immeta(meta, contrast=ct, type='plaid_contrast')
        # okay, now the average of sixteen gratings
        const = 2 * np.pi * cpd0
        for ct in sorted(ctsts):
            if ct == 0: continue
            im0 = np.mean([0.5 * (1 + np.sin(const * (np.random.rand() - 0.5 +
                                                      np.cos(th) * x - np.sin(th) * y)))
                           for th in np.linspace(0, 2 * np.pi, 17)[:-1]],
                          axis=0)
            im0 = (im0 - 0.5) / np.max(np.abs(im0 - 0.5)) + 0.5
            if ct < 1: im = (im0 - 0.5) * ct + 0.5
            else: im = im0
            _imsave('circ[contrast=%06.3f]' % ct, im)
            _immeta(meta, contrast=ct, type='circ_contrast')
        # Okay, make the noise pattern images
        dmeta = pyr.pmap(meta).discard('orientation').set('cpd', cpd0)
        for dn in dnsts:
            if dn <= 0: continue
            im = noise_pattern_stimulus(x.shape[0], dn, ct0, cpd0 / 2)
            _imsave('noise[density=%02d]' % dn, im)
            _immeta(dmeta, density=dn, type='noise_density')
    # Make fldat into a pimms itable
    fldat = [m.set('filename', flnm) for (m, flnm) in zip(fldat, flnms)]
    tbl = pimms.itable({k: np.asarray([ff[k] for ff in fldat])
                        for k in six.iterkeys(fldat[0])})
    return tbl
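Every grating above is one formula with a single property varied: im = 0.5 * (1 + sin((cos(theta)*x - sin(theta)*y) * 2*pi*cpd)), rescaled about mid-gray to set the contrast. A standalone NumPy sketch of a single grating (the particular values here are arbitrary):

import numpy as np

d2p, maxecc = 6.4, 10.0                        # pixels/degree and max eccentricity (deg)
dim = int(round(d2p * 2.0 * maxecc))           # image width and height in pixels
x = (np.arange(dim) - 0.5 * (dim - 1)) / d2p   # pixel indices -> degrees from center
(x, y) = np.meshgrid(x, x)

theta, cpd, contrast = np.pi / 6, 0.8, 0.75    # orientation (rad), cycles/deg, contrast
im = 0.5 * (1 + np.sin((np.cos(theta) * x - np.sin(theta) * y) * 2 * np.pi * cpd))
im = (im - 0.5) * contrast + 0.5               # rescale about mid-gray; stays in [0, 1]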
Example #7
    def test_persist(self):
        '''
        test_persist() tests pimms persist() function.
        '''
        from .lazy_complex import LazyComplex

        z = LazyComplex((1.0, 2.0))
        self.assertFalse(z.is_persistent())
        self.assertTrue(z.is_transient())
        z.persist()
        self.assertTrue(z.is_persistent())
        self.assertFalse(z.is_transient())

        z = LazyComplex((1.0, 2.0))
        self.assertFalse(z.is_persistent())
        self.assertTrue(z.is_transient())
        zp = pimms.persist(z)
        self.assertTrue(zp.is_persistent())
        self.assertFalse(zp.is_transient())
        self.assertFalse(z.is_persistent())
        self.assertTrue(z.is_transient())

        m0 = {'a': [1, 2, 3],
              'b': (2, 3, 4),
              'c': {'d': 'abc',
                    'e': set(['def', 'ghi']),
                    'f': frozenset([10, 11, 12])},
              'z': z,
              'zp': zp,
              'q': (1, 2, [3, 4]),
              't': pimms.itable({'c1': range(10),
                                 'c2': range(1, 11),
                                 'c3': range(2, 12)})}
        m = pimms.persist(m0)
        self.assertIs(m['b'], m0['b'])
        self.assertIsNot(m['a'], m0['a'])
        self.assertTrue(all(ai == bi for (ai, bi) in zip(m['a'], m0['a'])))
        self.assertTrue(pimms.is_pmap(m['c']))
        self.assertIs(m['c']['d'], m0['c']['d'])
        self.assertTrue(isinstance(m['c']['e'], frozenset))
        self.assertTrue(isinstance(m['c']['f'], frozenset))
        self.assertTrue(
            all(ai == bi for (ai, bi) in zip(m['c']['f'], m0['c']['f'])))
        self.assertTrue(m['z'].is_persistent())
        self.assertIs(m['zp'], m0['zp'])
        self.assertIs(m['q'], m0['q'])
        self.assertIs(m['q'][2], m0['q'][2])
        self.assertTrue(pimms.is_itable(m['t']))
        self.assertTrue(m['t'].is_persistent())
        m = pimms.persist(m0, depth=1)
        self.assertIs(m['b'], m0['b'])
        self.assertIsNot(m['a'], m0['a'])
        self.assertTrue(all(ai == bi for (ai, bi) in zip(m['a'], m0['a'])))
        self.assertTrue(pimms.is_pmap(m['c']))
        self.assertIs(m['c']['d'], m0['c']['d'])
        self.assertTrue(isinstance(m['c']['e'], set))
        self.assertTrue(isinstance(m['c']['f'], frozenset))
        self.assertTrue(
            all(ai == bi for (ai, bi) in zip(m['c']['f'], m0['c']['f'])))
        self.assertTrue(m['z'].is_persistent())
        self.assertIs(m['zp'], m0['zp'])
        self.assertIs(m['q'], m0['q'])
        self.assertIs(m['q'][2], m0['q'][2])
        self.assertTrue(pimms.is_itable(m['t']))
        self.assertTrue(m['t'].is_persistent())
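The second block of assertions exercises the depth option: a full pimms.persist(m0) freezes recursively (the nested set becomes a frozenset), while depth=1 stops after one level (the nested set survives as a set), and values that are already persistent, like tuples or zp, are returned by identity either way. A minimal sketch of the distinction (assuming pimms is installed):

import pimms

m0 = {'xs': [1, 2, 3], 'inner': {'s': set(['a', 'b'])}}
mf = pimms.persist(m0)            # full depth: the nested set becomes a frozenset
m1 = pimms.persist(m0, depth=1)   # depth=1: 'inner' becomes a pmap but its set survives
assert isinstance(mf['inner']['s'], frozenset)
assert isinstance(m1['inner']['s'], set)   # note: frozenset is not a subclass of set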