Exemplo n.º 1
0
 def calc(self, var='ori', fixed=None, force=False):
     """Calculate the tuning curve of spike counts vs. sweep variable `var`.

     var: name of the sweep variable to tune over (e.g. 'ori')
     fixed: optional dict mapping other sweep variable names to the value(s)
            to hold them fixed at while building the curve
     force: if False, skip recalculation when results for `var` already exist

     Sets self.x (unique values of var), self.y (summed spike counts per value)
     and self.peak (value of var with the maximum count).
     """
     if not force and self.var == var:
         return  # calc was already run with desired var, so results should be fine
     if fixed is not None:  # identity test is the correct None check
         fixedsweepis = []
         for fixedvar, fixedvals in fixed.items():
             vals = self.experiment.sweeptable.data[fixedvar]
             if fixedvar == 'ori':  # correct for orientation offset by adding
                 # NOTE(review): assumes oris span either 0-180 or 0-360 - confirm
                 if (vals > 180).any():
                     maxori = 360
                 else:
                     maxori = 180
                 vals = vals.copy()  # don't modify the sweeptable!
                 vals += self.experiment.s.orioff  # static parameter
                 vals %= maxori
             sweepis = []
             for fixedval in toiter(fixedvals):
                 sweepis.append(np.where(vals == fixedval)[0])
             sweepis = np.concatenate(sweepis)
             fixedsweepis.append(sweepis)
         # intersect all fixedvar arrays in fixedsweepis:
         fixedsweepis = core.intersect1d(fixedsweepis)
     # get values for var at all unique sweep indices:
     try:
         vals = self.experiment.sweeptable.data[var]
     except AttributeError:
         # ptc15 sweeptables lack a .data attrib and hold values in lists
         # instead of arrays, so index the sweeptable directly and convert.
         # ptc15 experiments are also missing .s and .d attribs, whose contents
         # seem to be found in .oldparams (see below).
         vals = np.asarray(self.experiment.sweeptable[var])
     if var == 'ori':  # correct for orientation offset by adding it to ori vals
         if (vals > 180).any():
             maxori = 360
         else:
             maxori = 180
         vals = vals.copy()  # don't modify the sweeptable!
         try:
             vals += self.experiment.s.orioff  # static parameter
         except AttributeError:  # for ptc15, params live in .oldparams
             vals += self.experiment.oldparams['orioff']  # static parameter
         vals %= maxori
     x = np.unique(vals)  # x axis
     y = np.zeros(len(x), dtype=int)  # spike counts for each variable value
     for vali, val in enumerate(x):
         sweepis = np.where(vals == val)[0]
         if fixed is not None:
             # restrict to sweeps that also satisfy the fixed-variable constraints:
             sweepis = np.intersect1d(sweepis, fixedsweepis, assume_unique=True)
         for sweepi in sweepis:
             y[vali] += self.counts[sweepi].sum()
     self.x, self.y = x, y
     self.peak = x[y.argmax()]
Exemplo n.º 2
0
 def get_nids(self, rids=None):
     """Return nids of active neurons common to all recordings specified in rids.
     Otherwise, return all active nids in all recordings. Active neurons in a recording
     are those with at least MINRATE mean spike rate during the recording"""
     if rids is None: # return all nids in all recordings
         rids = list(self.r.keys())
         # list() materializes dict_keys views: np.hstack/np.unique can't
         # concatenate/sort Python 3 dict_keys objects
         return np.unique(np.hstack([ list(self.r[rid].n.keys()) for rid in rids ]))
     else: # return intersection of nids of specified recordings
         nids = [ list(self.r[rid].n.keys()) for rid in rids ]
         return core.intersect1d(nids, assume_unique=True)
Exemplo n.º 3
0
 def get_nids(self, rids=None):
     """Return nids of active neurons common to all recordings specified in rids.
     Otherwise, return all active nids in all recordings. Active neurons in a recording
     are those with at least MINRATE mean spike rate during the recording"""
     if rids is None:  # 'is None': identity test is the correct idiom
         # union of nids across all recordings:
         rids = list(self.r)
         return np.unique(np.hstack([list(self.r[rid].n) for rid in rids]))
     else:  # return intersection of nids of specified recordings
         nids = [list(self.r[rid].n) for rid in rids]
         return core.intersect1d(nids, assume_unique=True)
Exemplo n.º 4
0
 def calc(self, var='ori', fixed=None, force=False):
     """Calculate the tuning curve of spike counts vs. sweep variable `var`.

     var: name of the sweep variable to tune over (e.g. 'ori')
     fixed: optional dict mapping other sweep variable names to the value(s)
            to hold them fixed at while building the curve
     force: if False, skip recalculation when results for `var` already exist

     Sets self.x (unique values of var), self.y (summed spike counts per value)
     and self.peak (value of var with the maximum count).
     """
     if not force and self.var == var:
         return # calc was already run with desired var, so results should be fine
     if fixed is not None: # identity test is the correct None check
         fixedsweepis = []
         for fixedvar, fixedvals in fixed.items():
             vals = self.experiment.sweeptable.data[fixedvar]
             if fixedvar == 'ori': # correct for orientation offset by adding
                 # NOTE(review): assumes oris span either 0-180 or 0-360 - confirm
                 if (vals > 180).any():
                     maxori = 360
                 else:
                     maxori = 180
                 vals = vals.copy() # don't modify the sweeptable!
                 vals += self.experiment.s.orioff # static parameter
                 vals %= maxori
             sweepis = []
             for fixedval in toiter(fixedvals):
                 sweepis.append(np.where(vals == fixedval)[0])
             sweepis = np.concatenate(sweepis)
             fixedsweepis.append(sweepis)
         # intersect all fixedvar arrays in fixedsweepis:
         fixedsweepis = core.intersect1d(fixedsweepis)
     # get values for var at all unique sweep indices:
     try:
         vals = self.experiment.sweeptable.data[var]
     except AttributeError:
         # ptc15 sweeptables lack a .data attrib and hold values in lists
         # instead of arrays, so index the sweeptable directly and convert.
         # ptc15 experiments are also missing .s and .d attribs, whose contents
         # seem to be found in .oldparams (see below).
         vals = np.asarray(self.experiment.sweeptable[var])
     if var == 'ori': # correct for orientation offset by adding it to ori vals
         if (vals > 180).any():
             maxori = 360
         else:
             maxori = 180
         vals = vals.copy() # don't modify the sweeptable!
         try:
             vals += self.experiment.s.orioff # static parameter
         except AttributeError: # for ptc15, params live in .oldparams
             vals += self.experiment.oldparams['orioff'] # static parameter
         vals %= maxori
     x = np.unique(vals) # x axis
     y = np.zeros(len(x), dtype=int) # spike counts for each variable value
     for vali, val in enumerate(x):
         sweepis = np.where(vals == val)[0]
         if fixed is not None:
             # restrict to sweeps that also satisfy the fixed-variable constraints:
             sweepis = np.intersect1d(sweepis, fixedsweepis, assume_unique=True)
         for sweepi in sweepis:
             y[vali] += self.counts[sweepi].sum()
     self.x, self.y = x, y
     self.peak = x[y.argmax()]
Exemplo n.º 5
0
 def get_allnids(self, rids=None):
     """Return nids of all neurons (active and quiet) common to all recordings
     specified in rids, ie return the intersection. If rids is None, return the union
     of all nids in the track instead"""
     if rids is None:
         rids = sorted(self.r.keys())
         # list() materializes dict_keys views: np.hstack/np.unique can't
         # concatenate/sort Python 3 dict_keys objects
         allnids = np.hstack([ list(self.r[rid].alln.keys()) for rid in rids ])
         return np.unique(allnids)
     else:
         allnids = [ list(self.r[rid].alln.keys()) for rid in rids ]
         return core.intersect1d(allnids, assume_unique=True)
Exemplo n.º 6
0
 def get_allnids(self, rids=None):
     """Return nids of all neurons (active and quiet) common to all recordings
     specified in rids, ie return the intersection. If rids is None, return the union
     of all nids in the track instead"""
     if rids is None:  # 'is None': identity test is the correct idiom
         # union of all nids across all recordings:
         rids = sorted(self.r)
         allnids = np.hstack([list(self.r[rid].alln) for rid in rids])
         return np.unique(allnids)
     else:
         # intersection of nids of the specified recordings:
         allnids = [list(self.r[rid].alln) for rid in rids]
         return core.intersect1d(allnids, assume_unique=True)
Exemplo n.º 7
0
 def plot(self, var='ori', fixed=None):
     """Plot the tuning curve for a sweep variable and return self.

     var: string name of variable you want to plot a tuning curve for
     fixed: dict with keys containing names of vars to keep fixed when building tuning
     curve, and values containing each var's value(s) to fix at

     Ex: r71.n[1].tune().plot('phase0', fixed={'ori':138, 'sfreqCycDeg':[0.4, 0.8]})
     """
     if not self.done:
         self.calc(tdelay=self.tdelay)
     if fixed is not None: # identity test is the correct None check
         fixedsweepis = []
         for fixedvar, fixedvals in fixed.items():
             vals = self.experiment.sweeptable.data[fixedvar]
             if fixedvar == 'ori': # correct for orientation offset by adding
                 # NOTE(review): assumes oris span either 0-180 or 0-360 - confirm
                 if (vals > 180).any():
                     maxori = 360
                 else:
                     maxori = 180
                 vals = vals.copy() # don't modify the sweeptable!
                 vals += self.experiment.s.orioff # static parameter
                 vals %= maxori
             sweepis = []
             for fixedval in toiter(fixedvals):
                 sweepis.append(np.where(vals == fixedval)[0])
             sweepis = np.concatenate(sweepis)
             fixedsweepis.append(sweepis)
         # intersect all fixedvar arrays in fixedsweepis:
         fixedsweepis = core.intersect1d(fixedsweepis)
     # get values for var at all unique sweep indices:
     vals = self.experiment.sweeptable.data[var]
     if var == 'ori': # correct for orientation offset by adding
         if (vals > 180).any():
             maxori = 360
         else:
             maxori = 180
         vals = vals.copy() # don't modify the sweeptable!
         vals += self.experiment.s.orioff # static parameter
         vals %= maxori
     x = np.unique(vals) # x axis
     y = np.zeros(len(x), dtype=int) # spike counts for each variable value
     for vali, val in enumerate(x):
         sweepis = np.where(vals == val)[0]
         if fixed is not None:
             # restrict to sweeps that also satisfy the fixed-variable constraints:
             sweepis = np.intersect1d(sweepis, fixedsweepis, assume_unique=True)
         for sweepi in sweepis:
             y[vali] += self.counts[sweepi].sum()
     # create a new figure:
     f = pl.figure()
     a = f.add_subplot(111)
     a.plot(x, y, 'k.-')
     a.set_xlabel(var)
     a.set_ylabel('spike count')
     titlestr = lastcmd()
     titlestr += ' nid%d' % self.neuron.id
     a.set_title(titlestr)
     f.canvas.window().setWindowTitle(titlestr)
     # annotate the peak of the curve in the top right corner:
     a.text(0.99, 0.99, 'peak=(%s, %s)' % (x[y.argmax()], y.max()),
            transform=a.transAxes,
            horizontalalignment='right',
            verticalalignment='top')
     f.tight_layout(pad=0.3) # crop figure to contents
     self.f = f
     self.x, self.y = x, y
     return self
Exemplo n.º 8
0
    def scsistim(self, method='weighted mean', width=None, tres=None, timeaverage=False,
                 plottime=False, s=5, figsize=(7.5, 6.5)):
        """Scatter plot some summary statistic of spike correlations of each recording vs
        synchrony index SI. Colour each point according to stimulus type. width and tres
        dictate tranges to split recordings up into. timeaverage means average across time
        values of both sc and si for each recording. s is point size.

        Returns (scs, sis, c): hstacked spike correlation values, synchrony
        index values, and per-point colour codes."""
        ## TODO: maybe limit to visually responsive cells
        ## TODO: add linear regression of si vs log(sc)

        uns = get_ipython().user_ns
        if width is None: # 'is None': identity test is the correct idiom
            width = uns['SIWIDTH'] # want powers of two for efficient FFT
        if tres is None:
            tres = width
        rids = sorted(self.r) # do everything in rid order
        # classify each recording by stimulus type, based on its name:
        msrids, bsrids, mvrids, dbrids = [], [], [], []
        for rid in rids:
            rname = self.r[rid].name
            if 'mseq' in rname:
                msrids.append(rid)
            elif 'blank' in rname or 'spont' in rname:
                bsrids.append(rid)
            elif 'MVI' in rname:
                mvrids.append(rid)
            elif 'driftbar' in rname:
                dbrids.append(rid)

        print('mseq: %r' % [self.r[rid].name for rid in msrids])
        print('blankscreen: %r' % [self.r[rid].name for rid in bsrids])
        print('movie: %r' % [self.r[rid].name for rid in mvrids])
        print('driftbar: %r' % [self.r[rid].name for rid in dbrids])
        # sanity check: the four classes must be mutually exclusive:
        isect = core.intersect1d([msrids, bsrids, mvrids, dbrids])
        if len(isect) != 0:
            raise RuntimeError("some rids were classified into more than one type: %r" % isect)
        # keep only the classified rids, in sorted order:
        rids = np.unique(np.hstack([msrids, bsrids, mvrids, dbrids]))

        scs, sis, c = [], [], []
        for rid in rids:
            r = self.r[rid]
            print('%s: %s' % (r.absname, r.name))
            spikecorr = r.sc(width=width, tres=tres)
            sc, si = spikecorr.si(method=method, plot=False) # calls sc.sct() and sc.si()
            sc = sc[0] # pull out the spike correlation values that span all laminae
            if timeaverage:
                # average across all time values of sc and si to get a single coordinate
                # per recording
                sc = sc.mean()
                si = si.mean()
            scs.append(sc)
            sis.append(si)
            # NOTE(review): 'e' is presumably a custom grey colour registered with
            # matplotlib elsewhere in this project - confirm
            if rid in msrids: color = 'k'
            elif rid in bsrids: color = 'e'
            elif rid in mvrids: color = 'r'
            elif rid in dbrids: color = 'b'
            else: raise ValueError("unclassified recording: %r" % r.name)
            c.append(np.tile(color, len(sc)))
        scs = np.hstack(scs)
        sis = np.hstack(sis)
        c = np.hstack(c)

        f = pl.figure(figsize=figsize)
        a = f.add_subplot(111)
        if plottime: # underplot lines connecting points adjacent in time
            a.plot(scs, sis, 'e--')
        a.scatter(scs, sis, c=c, edgecolors='none', s=s)
        a.set_ylim(0, 1)
        a.set_xlabel('%s spike correlations' % method)
        a.set_ylabel('synchrony index')
        titlestr = lastcmd()
        gcfm().window.setWindowTitle(titlestr)
        a.set_title(titlestr)
        # make proxy line artists for legend:
        ms = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='k', mec='k')
        bs = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='e', mec='e')
        mv = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='r', mec='r')
        db = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='b', mec='b')
        # add legend:
        a.legend([ms, bs, mv, db],
                 ['mseq', 'blank screen', 'movie', 'drift bar'],
                 numpoints=1, loc='lower right',
                 handlelength=1, handletextpad=0.5, labelspacing=0.1)
        f.tight_layout(pad=0.3) # crop figure to contents
        return scs, sis, c
Exemplo n.º 9
0
    def scsistim(self, method='mean', width=None, tres=None, timeaverage=False,
                 plottime=False, s=5, figsize=(7.5, 6.5)):
        """Scatter plot some summary statistic of spike correlations of each recording vs
        LFP synchrony index SI. Colour each point according to stimulus type. width and tres
        (sec) dictate tranges to split recordings up into. timeaverage averages across time
        values of both sc and si for each recording. s is point size"""
        ## TODO: maybe limit to visually responsive cells
        ## TODO: add linear regression of si vs log(sc)

        usernamespace = get_ipython().user_ns
        if width == None:
            width = usernamespace['LFPSIWIDTH']
        if tres == None:
            tres = width
        # recording IDs per stimulus type, keyed by track absname:
        bsrids = usernamespace['BSRIDS'][self.absname]
        msrids = usernamespace['MSRIDS'][self.absname]
        mvrids = usernamespace['NSRIDS'][self.absname]
        dbrids = usernamespace['DBRIDS'][self.absname]
        rids = sorted(bsrids + msrids + mvrids + dbrids) # do everything in rid order
        print('blankscreen: %r' % [self.r[rid].name for rid in bsrids])
        print('mseq: %r' % [self.r[rid].name for rid in msrids])
        print('movie: %r' % [self.r[rid].name for rid in mvrids])
        print('driftbar: %r' % [self.r[rid].name for rid in dbrids])
        # sanity check: the four classes must be mutually exclusive:
        overlap = core.intersect1d([msrids, bsrids, mvrids, dbrids])
        if len(overlap) != 0:
            raise RuntimeError("some rids were classified into more than one type: %r"
                               % overlap)

        allsc, allsi, colors = [], [], []
        for rid in rids:
            rec = self.r[rid]
            print('%s: %s' % (rec.absname, rec.name))
            spikecorr = rec.sc(width=width, tres=tres)
            # TODO: not sure if this is the right way to do this. A different set of
            # neurons is chosen for each recording, then mean sc(t) across all pairs of
            # each recording is pooled across recordings. This pooling is maybe a bit
            # dodgy when the included neurons differ per recording. The alternative
            # (only neurons exceeding MINTHRESH track-wide) has its own problem: such
            # neurons are often completely silent and don't deserve inclusion in sc
            # calculations for those durations.
            sc, si = spikecorr.si(method=method, plot=False) # calls sc.sct() and sc.si()
            sc = sc[0] # spike correlation values spanning all laminae
            if timeaverage:
                # collapse time series to a single coordinate per recording:
                sc = sc.mean()
                si = si.mean()
            allsc.append(sc)
            allsi.append(si)
            if rid in bsrids: color = 'e'
            elif rid in msrids: color = 'k'
            elif rid in mvrids: color = 'r'
            elif rid in dbrids: color = 'b'
            else: raise ValueError("unclassified recording: %r" % rec.name)
            colors.append(np.tile(color, len(sc)))
        allsc = np.hstack(allsc)
        allsi = np.hstack(allsi)
        colors = np.hstack(colors)

        fig = pl.figure(figsize=figsize)
        ax = fig.add_subplot(111)
        if plottime: # underplot lines connecting points adjacent in time
            ax.plot(allsc, allsi, 'e--')
        ax.scatter(allsc, allsi, c=colors, edgecolors='none', s=s)
        ax.set_ylim(0, 1)
        ax.set_xlabel('%s spike correlations' % method)
        ax.set_ylabel('synchrony index')
        titlestr = lastcmd()
        gcfm().window.setWindowTitle(titlestr)
        ax.set_title(titlestr)
        # proxy line artists for the legend, one per stimulus type:
        bs = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='e', mec='e')
        ms = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='k', mec='k')
        mv = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='r', mec='r')
        db = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='b', mec='b')
        ax.legend([bs, ms, mv, db],
                  ['blank screen', 'mseq', 'movie', 'drift bar'],
                  numpoints=1, loc='lower right',
                  handlelength=1, handletextpad=0.5, labelspacing=0.1)
        fig.tight_layout(pad=0.3) # crop figure to contents
        return allsc, allsi, colors
Exemplo n.º 10
0
    def scsistim(self, method='mean', width=None, tres=None, timeaverage=False,
                 plottime=False, s=5, figsize=(7.5, 6.5)):
        """Scatter plot some summary statistic of spike correlations of each recording vs
        LFP synchrony index SI. Colour each point according to stimulus type. width and tres
        (sec) dictate tranges to split recordings up into. timeaverage averages across time
        values of both sc and si for each recording. s is point size.

        Returns (scs, sis, c): hstacked spike correlation values, synchrony
        index values, and per-point colour codes."""
        ## TODO: maybe limit to visually responsive cells
        ## TODO: add linear regression of si vs log(sc)

        uns = get_ipython().user_ns
        if width == None:
            width = uns['LFPSIWIDTH']
        if tres == None:
            tres = width
        # recording IDs per stimulus type, keyed by track absname in the user namespace:
        bsrids = uns['BSRIDS'][self.absname] # blank screen
        msrids = uns['MSRIDS'][self.absname] # mseq
        mvrids = uns['NSRIDS'][self.absname] # movie (natural scene, presumably - confirm)
        dbrids = uns['DBRIDS'][self.absname] # drift bar
        rids = sorted(bsrids + msrids + mvrids + dbrids) # do everything in rid order
        print('blankscreen: %r' % [self.r[rid].name for rid in bsrids])
        print('mseq: %r' % [self.r[rid].name for rid in msrids])
        print('movie: %r' % [self.r[rid].name for rid in mvrids])
        print('driftbar: %r' % [self.r[rid].name for rid in dbrids])
        # sanity check: the four classes must be mutually exclusive:
        isect = core.intersect1d([msrids, bsrids, mvrids, dbrids])
        if len(isect) != 0:
            raise RuntimeError("some rids were classified into more than one type: %r" % isect)

        scs, sis, c = [], [], []
        for rid in rids:
            r = self.r[rid]
            print('%s: %s' % (r.absname, r.name))
            spikecorr = r.sc(width=width, tres=tres)
            """
            TODO: not sure if this is the right way to do this. A different set of neurons for
            each recording are chosen, then mean sc(t) across all pairs for each recording is
            found, and pooled across recordings. This pooling is maybe a bit dodgy. Is it
            valid to pool sc(t) values across recordings when the included neurons are
            different for each recording? The alternative is to deal only with neurons which
            exceed MINTHRESH track-wide, but the problem with that is that for much of the
            time, such neurons are completely silent, and therefore don't deserve to be
            included in sc calculations for those durations.
            """
            sc, si = spikecorr.si(method=method, plot=False) # calls sc.sct() and sc.si()
            sc = sc[0] # pull out the spike correlation values that span all laminae
            if timeaverage:
                # average across all time values of sc and si to get a single coordinate
                # per recording
                sc = sc.mean()
                si = si.mean()
            scs.append(sc)
            sis.append(si)
            # NOTE(review): 'e' is presumably a custom grey colour registered with
            # matplotlib elsewhere in this project - confirm
            if rid in bsrids: color = 'e'
            elif rid in msrids: color = 'k'
            elif rid in mvrids: color = 'r'
            elif rid in dbrids: color = 'b'
            else: raise ValueError("unclassified recording: %r" % r.name)
            c.append(np.tile(color, len(sc)))
        scs = np.hstack(scs)
        sis = np.hstack(sis)
        c = np.hstack(c)

        f = pl.figure(figsize=figsize)
        a = f.add_subplot(111)
        if plottime: # underplot lines connecting points adjacent in time
            a.plot(scs, sis, 'e--')
        a.scatter(scs, sis, c=c, edgecolors='none', s=s)
        a.set_ylim(0, 1)
        a.set_xlabel('%s spike correlations' % method)
        a.set_ylabel('synchrony index')
        titlestr = lastcmd()
        gcfm().window.setWindowTitle(titlestr)
        a.set_title(titlestr)
        # make proxy line artists for legend:
        bs = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='e', mec='e')
        ms = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='k', mec='k')
        mv = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='r', mec='r')
        db = mpl.lines.Line2D([1], [1], color='white', marker='o', mfc='b', mec='b')
        # add legend:
        a.legend([bs, ms, mv, db],
                 ['blank screen', 'mseq', 'movie', 'drift bar'],
                 numpoints=1, loc='lower right',
                 handlelength=1, handletextpad=0.5, labelspacing=0.1)
        f.tight_layout(pad=0.3) # crop figure to contents
        return scs, sis, c