Example #1
    def _prepare(self):
        if len(PUnitPhases()) != len(self):
            df = pd.DataFrame(PUnitPhases().fetch())
            # circular mean phase and circular std (jitter) per cell
            df['phase'] = [circ.mean(e) for e in df.phases]
            df['jitter'] = [circ.std(ph) for ph in df.phases]
            self.insert([e.to_dict() for _, e in df.loc[:, ['fish_id', 'cell_id', 'phase', 'jitter']].iterrows()],
                        skip_duplicates=True)
Example #2
def test_std():
    data = np.array([
        1.80044838, 2.02938314, 1.03534016, 4.84225057, 1.54256458, 5.19290675,
        2.18474784, 4.77054777, 1.51736933, 0.72727580
    ])
    s = pycircstat.std(data)
    assert_allclose(1.4657, s, atol=0.001, rtol=0.001)
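
For orientation: circular statistics usually define spread through the mean resultant length R = |mean(exp(i*alpha))|. The sketch below is plain numpy, not pycircstat's implementation; whether pycircstat.std returns the angular deviation or the circular standard deviation should be checked against its documentation.

import numpy as np

def circ_spread_sketch(alpha):
    # mean resultant length of the unit vectors exp(i*alpha)
    R = np.abs(np.mean(np.exp(1j * np.asarray(alpha))))
    angular_dev = np.sqrt(2 * (1 - R))   # angular deviation
    circ_std = np.sqrt(-2 * np.log(R))   # circular standard deviation
    return angular_dev, circ_std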
Example #3
    def _prepare(self):
        if len(PUnitPhases()) != len(self):
            df = pd.DataFrame(PUnitPhases().fetch())
            df['phase'] = [circ.mean(e) for e in df.phases]

            def center(x):
                x['phase'] = circ.center(x.phase)
                return x

            df = df.groupby('fish_id').apply(center)
            df['jitter'] = [circ.std(ph) for ph in df.phases]
            self.insert([e.to_dict() for _, e in df.loc[:, ['fish_id', 'cell_id', 'phase', 'jitter']].iterrows()],
                        skip_duplicates=True)
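
The groupby/apply step above centers every fish's mean phases before the jitter is computed. Assuming circ.center subtracts the group's circular mean and re-wraps the result (an assumption about pycircstat, not verified here), the same idea in plain numpy/pandas looks like this; the toy frame and column names are made up to mirror the example:

import numpy as np
import pandas as pd

def circ_mean_sketch(alpha):
    # circular mean as the angle of the mean resultant vector
    return np.angle(np.mean(np.exp(1j * np.asarray(alpha))))

# toy frame: one mean phase per cell, grouped by fish
toy = pd.DataFrame({'fish_id': [1, 1, 2, 2], 'phase': [0.2, 0.6, 3.0, 3.4]})
# subtract each fish's circular mean and wrap back to (-pi, pi]
toy['phase'] = toy.groupby('fish_id')['phase'].transform(
    lambda p: np.angle(np.exp(1j * (p - circ_mean_sketch(p)))))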
Example #4
    def plot(self):
        # plot mean phase of spikes to show that they are fish dependent
        df = pd.DataFrame(self.fetch())
        df['eod'] = [1 / np.median(np.diff(e)) for e in df.eod_times]
        df['cmean'] = [circ.mean(e) for e in df.phases]
        df['jitter'] = [circ.std(ph) / 2 / np.pi / e for ph, e in zip(df.phases, df.eod)]

        model = ols('cmean ~ C(fish_id)', data=df).fit()
        table = sm.stats.anova_lm(model)
        print(table)

        sns.catplot(x='fish_id', y='cmean', data=df, kind='bar')
        g = sns.pairplot(df.loc[:, ['cmean', 'jitter', 'fish_id']], hue='fish_id')
        plt.show()
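
The jitter column above converts a circular standard deviation from radians into seconds by dividing by 2*pi times the EOD frequency, i.e. it expresses the spread as a fraction of one EOD period. A worked line with made-up numbers:

import numpy as np

eod = 800.0          # hypothetical EOD frequency in Hz
phase_std = 0.5      # circular std of the spike phases in radians (made up)
jitter_s = phase_std / (2 * np.pi) / eod  # temporal jitter in seconds, about 1e-4 s here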
Example #6
    def _make_tuples(self, key):
        print('Processing', key['cell_id'])
        sampling_rate, eod = (Baseline() & key).fetch1('samplingrate', 'eod')
        dt = 1. / sampling_rate

        trials = Baseline.LocalEODPeaksTroughs() * Baseline.SpikeTimes() & key

        aggregated_spikes = np.hstack([
            s / 1000 - p[0] * dt  # spike times (ms) to s, aligned to the first local EOD peak
            for s, p in zip(*trials.fetch('times', 'peaks'))
        ])

        aggregated_spikes %= 1 / eod  # fold spike times into a single EOD period

        aggregated_spikes *= eod * 2 * np.pi  # convert to phase in [0, 2*pi)
        key['base_var'], key['base_mean'], key['base_std'] = \
            circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
        self.insert1(key)
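
The two lines before the insert fold each aligned spike time into one EOD period and rescale the remainder to a phase in [0, 2*pi). A self-contained restatement with made-up numbers:

import numpy as np

eod = 800.0                                         # EOD frequency in Hz (made up)
spikes = np.array([0.0013, 0.0051, 0.0122])         # aligned spike times in seconds (made up)
phases = (spikes % (1.0 / eod)) * eod * 2 * np.pi   # phase of each spike within its EOD cycle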
Example #8
    def _make_tuples(self, key):
        print('Processing', key['cell_id'], 'run', key['run_id'])
        if SecondOrderSignificantPeaks() & dict(
                key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                refined=1):
            eod, vs = (SecondOrderSignificantPeaks()
                       & dict(key,
                              eod_coeff=1,
                              stimulus_coeff=0,
                              baseline_coeff=0,
                              refined=1)).fetch1('frequency',
                                                 'vector_strength')
        elif SecondOrderSignificantPeaks() & dict(
                key, eod_coeff=1, stimulus_coeff=0, baseline_coeff=0,
                refined=0):
            eod, vs = (SecondOrderSignificantPeaks()
                       & dict(key,
                              eod_coeff=1,
                              stimulus_coeff=0,
                              baseline_coeff=0,
                              refined=0)).fetch1('frequency',
                                                 'vector_strength')
        else:
            eod = (Runs() & key).fetch1('eod')

        aggregated_spikes = TrialAlign().load_trials(key)
        if len(aggregated_spikes) == 0:
            warn('TrialAlign returned no spikes. Skipping')
            return
        else:
            aggregated_spikes = np.hstack(aggregated_spikes)
        aggregated_spikes %= 1 / eod  # fold spike times into a single EOD period

        aggregated_spikes *= eod * 2 * np.pi  # convert to phase in [0, 2*pi)
        if len(aggregated_spikes) > 1:
            key['stim_var'], key['stim_mean'], key['stim_std'] = \
                circ.var(aggregated_spikes), circ.mean(aggregated_spikes), circ.std(aggregated_spikes)
            self.insert1(key)
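
For reference: under the textbook definitions, the vector strength fetched above is the mean resultant length R of the spike phases and the circular variance is 1 - R. Whether circ.var and the stored vector_strength follow exactly these conventions is an assumption here, not something the snippet itself guarantees.

import numpy as np

phases = np.random.uniform(0, 2 * np.pi, 1000)  # synthetic spike phases
R = np.abs(np.mean(np.exp(1j * phases)))        # mean resultant length (textbook vector strength)
circ_var = 1 - R                                # textbook circular variance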
Example #9
    def load_spikes(self, key, centered=True, plot=False):
        if centered:
            Phases = (RandomTrials.PhaseSet() * CenteredPUnitPhases()).project('phase', phase_cell='cell_id')
        else:
            Phases = (RandomTrials.PhaseSet() * UncenteredPUnitPhases()).project('phase', phase_cell='cell_id')
        trials = Runs.SpikeTimes() * RandomTrials.TrialSet() * Phases * TrialAlign() & key

        times, phase, align_times = trials.fetch('times', 'phase', 't0')

        dt = 1. / (Runs() & trials).fetch1('samplingrate')

        eod, duration = (Runs() & trials).fetch1('eod', 'duration')
        rad2period = 1 / 2 / np.pi / eod
        # get spikes, convert to s, align to EOD, add bootstrapped phase
        print('Phase std', circ.std(phase), 'Centered', centered)

        if plot:
            figdir = 'figures/sanity/pyr_lif_stimulus/'
            mkdir(figdir)
            fig, ax = plt.subplots(2, 1, sharex=True)

            spikes = [s / 1000 - t0 for s, t0 in zip(times, align_times)]
            for i, s in enumerate(spikes):
                ax[0].plot(s, 0 * s + i, '.k', ms=1)
            ax[0].set_title('without phase variation')
            spikes = [s / 1000 - t0 + ph * rad2period for s, t0, ph in zip(times, align_times, phase)]
            for i, s in enumerate(spikes):
                ax[1].plot(s, 0 * s + i, '.k', ms=1)
            ax[1].set_title('with phase variation')
            fig.savefig(figdir +
                'alignments_{n_total}_{pyr_simul_id}_{repeat_id}_{centered}.pdf'.format(centered=centered, **key))


        spikes = [s / 1000 - t0 + ph * rad2period for s, t0, ph in zip(times, align_times, phase)]

        return spikes, dt, eod, duration
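
In load_spikes the division by 1000 suggests the fetched spike times are in ms while the alignment times t0 are in seconds, and rad2period = 1 / (2*pi*eod) turns the bootstrapped phase into a time shift (2*pi rad corresponds to one EOD period of 1/eod s). A sketch of that bookkeeping with hypothetical numbers:

import numpy as np

eod = 800.0                            # hypothetical EOD frequency in Hz
rad2period = 1 / (2 * np.pi * eod)     # seconds per radian of phase
t_ms, t0, ph = 12.5, 0.004, np.pi / 2  # made-up spike time (ms), alignment time (s), phase (rad)
t_aligned = t_ms / 1000 - t0 + ph * rad2period  # 0.0085 + 1/(4*eod) = 0.0088125 s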