Code example #1
    def test_corrcoef_binned(self):
        '''
        Test result of a correlation coefficient between two binned spike
        trains.
        '''

        # Calculate clipped and unclipped
        res_clipped = sc.corrcoef(
            self.binned_st, clip=True)
        res_unclipped = sc.corrcoef(
            self.binned_st, clip=False)

        # Check dimensions
        self.assertEqual(len(res_clipped), 2)
        self.assertEqual(len(res_unclipped), 2)

        # Check result unclipped against result calculated from scratch for
        # the off-diagonal element
        mat = self.binned_st.matrix_unclipped()
        mean_0 = np.mean(mat[0])
        mean_1 = np.mean(mat[1])
        target_from_scratch = \
            np.dot(mat[0] - mean_0, mat[1] - mean_1) / \
            np.sqrt(
                np.dot(mat[0] - mean_0, mat[0] - mean_0) *
                np.dot(mat[1] - mean_1, mat[1] - mean_1))

        # Check result unclipped against result calculated by numpy.corrcoef
        target_numpy = np.corrcoef(mat)

        self.assertAlmostEqual(target_from_scratch, target_numpy[0][1])
        self.assertAlmostEqual(res_unclipped[0][1], target_from_scratch)
        self.assertAlmostEqual(res_unclipped[1][0], target_from_scratch)

        # Check result clipped against result calculated from scratch for
        # the off-diagonal element
        mat = self.binned_st.matrix_clipped()
        mean_0 = np.mean(mat[0])
        mean_1 = np.mean(mat[1])
        target_from_scratch = \
            np.dot(mat[0] - mean_0, mat[1] - mean_1) / \
            np.sqrt(
                np.dot(mat[0] - mean_0, mat[0] - mean_0) *
                np.dot(mat[1] - mean_1, mat[1] - mean_1))

        # Check result clipped against result calculated by numpy.corrcoef
        target_numpy = np.corrcoef(mat)

        self.assertAlmostEqual(target_from_scratch, target_numpy[0][1])
        self.assertAlmostEqual(res_clipped[0][1], target_from_scratch)
        self.assertAlmostEqual(res_clipped[1][0], target_from_scratch)
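For reference, the "from scratch" target checked above is Pearson's correlation coefficient of the two binned count vectors (the rows of mat), which is also what np.corrcoef reports in its off-diagonal entries:

$$
r_{xy} = \frac{\sum_i (x_i - \bar{x})(y_i - \bar{y})}
              {\sqrt{\sum_i (x_i - \bar{x})^2 \, \sum_i (y_i - \bar{y})^2}}
$$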
Code example #2
    def test_corrcoef_binned(self):
        '''
        Test the correlation coefficient between two binned spike trains.
        '''

        # Calculate clipped and unclipped
        res_clipped = sc.corrcoef(
            self.binned_st, binary=True)
        res_unclipped = sc.corrcoef(
            self.binned_st, binary=False)

        # Check dimensions
        self.assertEqual(len(res_clipped), 2)
        self.assertEqual(len(res_unclipped), 2)

        # Check result unclipped against result calculated from scratch for
        # the off-diagonal element
        mat = self.binned_st.to_array()
        mean_0 = np.mean(mat[0])
        mean_1 = np.mean(mat[1])
        target_from_scratch = \
            np.dot(mat[0] - mean_0, mat[1] - mean_1) / \
            np.sqrt(
                np.dot(mat[0] - mean_0, mat[0] - mean_0) *
                np.dot(mat[1] - mean_1, mat[1] - mean_1))

        # Check result unclipped against result calculated by numpy.corrcoef
        target_numpy = np.corrcoef(mat)

        self.assertAlmostEqual(target_from_scratch, target_numpy[0][1])
        self.assertAlmostEqual(res_unclipped[0][1], target_from_scratch)
        self.assertAlmostEqual(res_unclipped[1][0], target_from_scratch)

        # Check result clipped against result calculated from scratch for
        # the off-diagonal element
        mat = self.binned_st.to_bool_array()
        mean_0 = np.mean(mat[0])
        mean_1 = np.mean(mat[1])
        target_from_scratch = \
            np.dot(mat[0] - mean_0, mat[1] - mean_1) / \
            np.sqrt(
                np.dot(mat[0] - mean_0, mat[0] - mean_0) *
                np.dot(mat[1] - mean_1, mat[1] - mean_1))

        # Check result clipped against result calculated by numpy.corrcoef
        target_numpy = np.corrcoef(mat)

        self.assertAlmostEqual(target_from_scratch, target_numpy[0][1])
        self.assertAlmostEqual(res_clipped[0][1], target_from_scratch)
        self.assertAlmostEqual(res_clipped[1][0], target_from_scratch)
Code example #3
    def test_corrcoef_binned_short_input(self):
        '''
        Test if input of a single binned spike train yields 1.0.
        '''
        # Calculate correlation
        binned_st = conv.BinnedSpikeTrain(
            self.st_0, t_start=0 * pq.ms, t_stop=50. * pq.ms,
            binsize=1 * pq.ms)
        result = sc.corrcoef(binned_st, fast=False)
        target = np.array(1.)

        # Check result and dimensionality of result
        self.assertEqual(result.ndim, 0)
        assert_array_almost_equal(result, target)
        assert_array_almost_equal(result, sc.corrcoef(binned_st, fast=True))
Code example #4
    def generate_cc_matrix(self,
                           spiketrains=None,
                           binary=False,
                           model=None,
                           nan_to_num=False,
                           **kwargs):
        """
        Calculates the covariances between all pairs of spike trains.

        Parameters
        ----------
        spiketrain_list : list of neo.SpikeTrain (default None)
            If no list is passed the function tries to access the class
            parameter 'spiketrains'.

        binary: bool (default False)
            Parameter is passed to
            elephant.spike_train_correlation.covariance()

        kwargs:
            Passed to elephant.conversion.BinnedSpikeTrain()

        Returns : list of floats
            list of covariances of length = (N^2 - N)/2 where N is the number
            of spike trains.
        -------
        """
        binned_sts = self.robust_BinnedSpikeTrain(spiketrains, **kwargs)

        cc_matrix = corrcoef(binned_sts, binary=binary)

        if nan_to_num:
            cc_matrix = np.nan_to_num(cc_matrix)
        return cc_matrix
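This method is a thin wrapper around Elephant. If the surrounding class (and its robust_BinnedSpikeTrain helper) is not available, the same computation can be sketched with Elephant directly; the binsize mentioned in the comment is purely illustrative:

import numpy as np
from elephant.conversion import BinnedSpikeTrain
from elephant.spike_train_correlation import corrcoef


def cc_matrix_sketch(spiketrains, binary=False, nan_to_num=False, **kwargs):
    # Bin the spike trains; kwargs are passed through,
    # e.g. binsize=5 * pq.ms with quantities imported as pq
    binned_sts = BinnedSpikeTrain(spiketrains, **kwargs)
    # N x N matrix of pairwise Pearson correlation coefficients
    cc_matrix = corrcoef(binned_sts, binary=binary)
    if nan_to_num:
        # Replace NaN entries (e.g. from empty spike trains) with zero
        cc_matrix = np.nan_to_num(cc_matrix)
    return cc_matrix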
Code example #5
    def test_corrcoef_binned_same_spiketrains(self):
        '''
        Test if the correlation coefficient between two identical binned spike
        trains evaluates to a 2x2 matrix of ones.
        '''
        # Calculate correlation
        binned_st = conv.BinnedSpikeTrain(
            [self.st_0, self.st_0], t_start=0 * pq.ms, t_stop=50. * pq.ms,
            binsize=1 * pq.ms)
        result = sc.corrcoef(binned_st, fast=False)
        target = np.ones((2, 2))

        # Check dimensions
        self.assertEqual(len(result), 2)
        # Check result
        assert_array_almost_equal(result, target)
        assert_array_almost_equal(result, sc.corrcoef(binned_st, fast=True))
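The expected matrix of ones follows directly from the definition: for a train correlated with itself the covariance equals the variance, so

$$
r_{xx} = \frac{\mathrm{Cov}(x, x)}{\sigma_x \, \sigma_x}
       = \frac{\sigma_x^2}{\sigma_x^2} = 1
$$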
Code example #6
    def test_corrcoef_binned_short_input(self):
        '''
        Test if input of a single binned spike train yields 1.0.
        '''
        # Calculate correlation
        binned_st = conv.BinnedSpikeTrain(
            self.st_0, t_start=0 * pq.ms, t_stop=50. * pq.ms,
            binsize=1 * pq.ms)
        target = sc.corrcoef(binned_st)

        # Check result
        self.assertEqual(target, 1.)
Code example #7
    def test_corrcoef_binned_short_input(self):
        '''
        Test if input of a single binned spike train yields 1.0.
        '''
        # Calculate correlation
        binned_st = conv.BinnedSpikeTrain(
            self.st_0, t_start=0 * pq.ms, t_stop=50. * pq.ms,
            binsize=1 * pq.ms)
        target = sc.corrcoef(binned_st)

        # Check result and dimensionality of result
        self.assertEqual(target.ndim, 0)
        self.assertEqual(target, 1.)
Code example #8
    def test_corrcoef_binned_same_spiketrains(self):
        '''
        Test if the correlation coefficient between two identical binned spike
        trains evaluates to a 2x2 matrix of ones.
        '''
        # Calculate correlation
        binned_st = conv.BinnedSpikeTrain(
            [self.st_0, self.st_0], t_start=0 * pq.ms, t_stop=50. * pq.ms,
            binsize=1 * pq.ms)
        target = sc.corrcoef(binned_st)

        # Check dimensions
        self.assertEqual(len(target), 2)
        # Check result
        assert_array_equal(target, 1.)
Code example #9
def get_default_corrcoef_matrix():
    # set random seed explicitly, which is used in homogeneous_poisson_process,
    # to avoid using different seeds for creating target and result image
    np.random.seed(0)
    spike_train_1 = homogeneous_poisson_process(rate=10.0 * Hz,
                                                t_start=0.0 * s,
                                                t_stop=10.0 * s)
    spike_train_2 = homogeneous_poisson_process(rate=10.0 * Hz,
                                                t_start=0.0 * s,
                                                t_stop=10.0 * s)
    # the binsize of 0.1s is rather large so we might expect non-zero
    # cross-correlation
    corrcoef_matrix = stcorr.corrcoef(
        BinnedSpikeTrain([spike_train_1, spike_train_2], binsize=0.1 * s))
    return corrcoef_matrix
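A hypothetical follow-up (not part of the original snippet) would be to render the returned 2x2 matrix as an image, which is presumably how the target and result images mentioned in the comment are produced:

import matplotlib.pyplot as plt

corrcoef_matrix = get_default_corrcoef_matrix()
plt.imshow(corrcoef_matrix, vmin=-1, vmax=1, cmap='bwr')
plt.colorbar(label='correlation coefficient')
plt.savefig('corrcoef_matrix.png')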
Code example #10
    def test_empty_spike_train(self):
        '''
        Test whether a warning is issued in the case of an empty spike train.
        Also check correctness of the output array.
        '''
        # st_2 is empty
        binned_12 = conv.BinnedSpikeTrain([self.st_1, self.st_2],
                                          binsize=1 * pq.ms)

        with self.assertWarns(UserWarning):
            result = sc.corrcoef(binned_12, fast=False)

        # test for NaNs in the output array
        target = np.zeros((2, 2)) * np.nan
        target[0, 0] = 1.0
        assert_array_almost_equal(result, target)
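The NaN entries are exactly what plain NumPy produces here: an empty spike train gives an all-zero binned row with zero variance, so every correlation involving it is 0/0. A minimal sketch, independent of Elephant:

import numpy as np

binned = np.array([[0, 1, 0, 2, 1],    # train with spikes
                   [0, 0, 0, 0, 0]])   # empty train
with np.errstate(invalid='ignore'):
    print(np.corrcoef(binned))
# roughly:
# [[ 1. nan]
#  [nan nan]]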
Code example #11
    def test_corrcoef_cont_binned(self):
        '''
        Test if the binned and continuous-time corrcoef functions return the
        same thing in the case that there are no coincidences across bin
        borders.
        '''
        # Calculate non-binned and binned corrcoef
        res_nonbinned = sc.corrcoef_continuous(
            [self.st_0, self.st_1], coinc_width=1 * pq.ms)
        res_binned = sc.corrcoef(
            self.binned_st, clip=False)

        # Check dimensions
        self.assertEqual(len(res_nonbinned), 2)
        self.assertEqual(len(res_binned), 2)

        assert_array_equal(res_binned, res_nonbinned)
Code example #12
    def test_empty_spike_train(self):
        '''
        Test whether a warning is issued in the case of an empty spike train.
        Also check correctness of the output array.
        '''
        # st_2 is empty
        binned_12 = conv.BinnedSpikeTrain([self.st_1, self.st_2],
                                          binsize=1 * pq.ms)

        # test for a warning
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            ccmat = sc.corrcoef(binned_12)
            self.assertTrue(issubclass(w.pop().category, UserWarning))

        # test for NaNs in the output array
        target = np.zeros((2, 2)) * np.nan
        target[0, 0] = 1.0
        assert_array_equal(ccmat, target)
Code example #13
def correlation_matrix(fig, ax, tr, crun='run_00000000', N=50, nbin=None):

    if nbin is None:
        nbin = int(tr.T / (50 * ms))

    df = tr.crun.GExc_spks
    xt, xi = df.t, df.i

    sts = [
        neo.SpikeTrain(xt[xi == i] / second * pq.s,
                       t_stop=tr.T / second * pq.s) for i in range(N)
    ]

    x = corrcoef(BinnedSpikeTrain(sts, num_bins=nbin))

    x[np.diag_indices(N)] = 0

    divider = make_axes_locatable(ax)
    cax = divider.append_axes('right', size='5%', pad=0.05)

    im = ax.imshow(x)
    fig.colorbar(im, cax=cax, orientation='vertical')
Code example #14
def calc_corellation(spike_times, spike_ids, num, duration, bin_x=None):
    # Create randomly shuffled indices
    neuron_indices = np.arange(num)
    np.random.shuffle(neuron_indices)

    # Loop through indices
    spike_trains = []
    for n in neuron_indices:
        # Extract spike times
        neuron_spike_times = spike_times[spike_ids == n]

        # If there are any spikes
        if len(neuron_spike_times) > 0:
            # Add neo SpikeTrain object
            spike_trains.append(
                SpikeTrain(neuron_spike_times * ms,
                           t_start=1 * s,
                           t_stop=10 * s))

            # If we have found our 200 spike trains, stop
            if len(spike_trains) == 200:
                break

    # Check that 200 spike trains containing spikes could be found
    assert len(spike_trains) == 200

    # Bin spikes using bins corresponding to 2ms refractory period
    binned_spike_trains = BinnedSpikeTrain(spike_trains, binsize=2.0 * ms)

    # Calculate correlation matrix
    correlation = corrcoef(binned_spike_trains)

    # Take lower triangle of matrix (minus diagonal)
    correlation_non_disjoint = correlation[np.tril_indices_from(correlation,
                                                                k=-1)]

    # Calculate histogram
    return calc_histogram(correlation_non_disjoint, 0.002, bin_x)
Code example #15
#         plt.plot(t, i * np.ones_like(t), 'k.', markersize=2)
# plt.axis('tight')
# plt.xlim(0, 1000)
# plt.xlabel('Time (ms)', fontsize=16)
# plt.ylabel('Spike Train Index', fontsize=16)
# plt.gca().tick_params(axis='both', which='major', labelsize=14)
# #plt.show()
# cc_matrix = corrcoef(BinnedSpikeTrain(spiketrain_list, 1 * ms))
# print(cc_matrix[0][1])

rate_correlation = []
for x in permutations(np.divide(np.linspace(0, 100, 11),100), 3):
    if sum(x) == 1:
        spiketrain_list = cpp(500 * Hz, x, 1000 * ms)
        rate = len(LIF_R_ASC_AT(w_e, w_i, spiketrain_list[0], spiketrain_list[1]))
        cc_matrix = corrcoef(BinnedSpikeTrain(spiketrain_list, 5 * ms))
        rate_correlation.append([cc_matrix[0][1], rate])

print(rate_correlation)
x_val = [x[0] for x in rate_correlation]
y_val = [x[1] for x in rate_correlation]
#plt.scatter(x_val, y_val, marker="x")
sns.regplot(x_val, y_val, ci=None)
plt.ylim((0, 30))
plt.xlim((0, 1))
plt.xlabel("Pearson’s correlation coefficient")
plt.ylabel("Output firing rate (Hz)")
#sns.lmplot("Correlation", "Output firing rate (Hz)", pd.DataFrame((x_val, y_val), columns =['Correlation', 'Output firing rate (Hz)']))
plt.show()

slope, intercept = np.polyfit(x_val, y_val, 1)
Code example #16
    def test_corrcoef_fast_mode(self):
        np.random.seed(27)
        st = homogeneous_poisson_process(rate=10 * pq.Hz, t_stop=10 * pq.s)
        binned_st = conv.BinnedSpikeTrain(st, num_bins=10)
        assert_array_almost_equal(sc.corrcoef(binned_st, fast=False),
                                  sc.corrcoef(binned_st, fast=True))
Code example #17
        psth.index = np.arange(650,3170,1)
        plt.plot(psth.iloc[950:1250],label='conductance*'+key)
        channels[item][key]['PSTH'] = psth
    plt.legend()
    plt.axis('tight')
#    plt.show()
    plt.savefig(item+'_psth_detail.png',dpi=600,format='png')

#%%
###cross-trial correlations WITHIN-cell

from elephant.spike_train_correlation import corrcoef

for item in g_list:
    for key in channels[item]:
        tmpcorr = corrcoef(channels[item][key]['binned_spike_list'])
        tmpmean = np.mean(tmpcorr)
        channels[item][key]['corr'] = tmpmean
        tmpcorr = None
        tmpmean = None

#%%
"""
12 Dec 2017
The elephant BOX kernel doesn't seem to work; it fails, and I'm not sure
whether the problem is on my end or in the code itself. Honestly, I could just
build the array myself for the most part, since it's just a value of 1 over a
width of 2*delta. Anyway, this first cell shows how to do it on a single
binary binned spike train. The next cell will do it for all simulations.

This cell now loops through 8 delta ms values, through all the sim files
Code example #18
    print('Processing dataset ' + str(i_run + 1) + '/' + str(nrun))
    path = '../data' + str(i_run) + '/'
    spike_times_list = __load_spike_times(path, name, begin, end, npop)

    for ipop in range(npop):
        spike_times = spike_times_list[ipop]
        st_list = []
        for j in range(matrix_size):
            spike_train = SpikeTrain(np.array(spike_times[j]) * s,
                                     t_stop=(end / 1000.0) * s)
            st_list.append(spike_train)

        binned_st = BinnedSpikeTrain(st_list, spike_time_bin * s, None,
                                     (begin / 1000.0) * s, (end / 1000.0) * s)
        #print (binned_st)
        cc_matrix = corrcoef(binned_st)
        correl = []
        for j in range(matrix_size):
            for k in range(matrix_size):
                #print(j, k, cc_matrix[j][k])
                if (j != k and cc_matrix[j][k] < xmax
                        and cc_matrix[j][k] > xmin):
                    correl.append(cc_matrix[j][k])

        x, hist1 = __smooth_hist(correl, xmin, xmax, nx)
        arr = np.column_stack((x, hist1))
        np.savetxt(path + 'correl_' + str(ipop) + '.dat', arr)

        if i_run == 0:
            plt.figure(ipop)
            fig, (ax1, ax2) = plt.subplots(2)
Code example #19
    def test_cross_correlation_histogram(self):
        '''
        Test generic result of a cross-correlation histogram between two binned
        spike trains.
        '''
        # Calculate CCH using Elephant (normal and binary version) with
        # mode equal to 'full' (whole spike trains are correlated)
        cch_clipped, bin_ids_clipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full',
            binary=True)
        cch_unclipped, bin_ids_unclipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full', binary=False)

        cch_clipped_mem, bin_ids_clipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full',
            binary=True, method='memory')
        cch_unclipped_mem, bin_ids_unclipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full',
            binary=False, method='memory')
        # Check consistency two methods
        assert_array_equal(
            np.squeeze(cch_clipped.magnitude), np.squeeze(
                cch_clipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_clipped.times), np.squeeze(
                cch_clipped_mem.times))
        assert_array_equal(
            np.squeeze(cch_unclipped.magnitude), np.squeeze(
                cch_unclipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_unclipped.times), np.squeeze(
                cch_unclipped_mem.times))
        assert_array_almost_equal(bin_ids_clipped, bin_ids_clipped_mem)
        assert_array_almost_equal(bin_ids_unclipped, bin_ids_unclipped_mem)

        # Check normal correlation. Note: Use numpy correlate to verify result.
        # Note: numpy conventions for input array 1 and input array 2 are
        # swapped compared to Elephant!
        mat1 = self.binned_st1.to_array()[0]
        mat2 = self.binned_st2.to_array()[0]
        target_numpy = np.correlate(mat2, mat1, mode='full')
        assert_array_equal(
            target_numpy, np.squeeze(cch_unclipped.magnitude))


        # Check cross correlation function for several displacements tau
        # Note: Use Elephant corrcoef to verify result
        tau = [-25.0, 0.0, 13.0] # in ms
        for t in tau:
            # adjust t_start, t_stop to shift by tau
            t0 = np.min([self.st_1.t_start+t*pq.ms, self.st_2.t_start])
            t1 = np.max([self.st_1.t_stop+t*pq.ms, self.st_2.t_stop])            
            st1 = neo.SpikeTrain(self.st_1.magnitude+t, units='ms',
                                t_start = t0*pq.ms, t_stop = t1*pq.ms)           
            st2 = neo.SpikeTrain(self.st_2.magnitude, units='ms',
                                t_start = t0*pq.ms, t_stop = t1*pq.ms)              
            binned_sts = conv.BinnedSpikeTrain([st1, st2],
                                               binsize=1*pq.ms,
                                               t_start = t0*pq.ms,
                                               t_stop = t1*pq.ms)
            # calculate corrcoef
            corrcoef = sc.corrcoef(binned_sts)[1,0]    
            
            # expand t_stop to have two spike trains with same length as st1, st2
            st1 = neo.SpikeTrain(self.st_1.magnitude, units='ms',
                                t_start = self.st_1.t_start, 
                                t_stop = self.st_1.t_stop+np.abs(t)*pq.ms)           
            st2 = neo.SpikeTrain(self.st_2.magnitude, units='ms',
                                t_start = self.st_2.t_start, 
                                t_stop = self.st_2.t_stop+np.abs(t)*pq.ms)
            binned_st1 = conv.BinnedSpikeTrain(
                st1, t_start=0*pq.ms, t_stop=(50+np.abs(t))*pq.ms,
                binsize=1 * pq.ms)
            binned_st2 = conv.BinnedSpikeTrain(
                st2, t_start=0 * pq.ms, t_stop=(50+np.abs(t))*pq.ms,
                binsize=1 * pq.ms)
            # calculate CCHcoef and take value at t=tau
            CCHcoef, _ = sc.cch(binned_st1, binned_st2,
                                cross_corr_coef=True)
            left_edge = - binned_st1.num_bins + 1
            tau_bin = int(t / float(binned_st1.binsize.magnitude))
            assert_array_equal(
                corrcoef, CCHcoef[tau_bin - left_edge].magnitude)
            

        # Check correlation using binary spike trains
        mat1 = np.array(self.binned_st1.to_bool_array()[0], dtype=int)
        mat2 = np.array(self.binned_st2.to_bool_array()[0], dtype=int)
        target_numpy = np.correlate(mat2, mat1, mode='full')
        assert_array_equal(
            target_numpy, np.squeeze(cch_clipped.magnitude))

        # Check the time axis and bin IDs of the resulting AnalogSignal
        assert_array_almost_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_unclipped.times)
        assert_array_almost_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_clipped.times)

        # Calculate CCH using Elephant (normal and binary version) with
        # mode equal to 'valid' (only completely overlapping intervals of the
        # spike trains are correlated)
        cch_clipped, bin_ids_clipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=True)
        cch_unclipped, bin_ids_unclipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=False)
        cch_clipped_mem, bin_ids_clipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=True, method='memory')
        cch_unclipped_mem, bin_ids_unclipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=False, method='memory')

        # Check consistency two methods
        assert_array_equal(
            np.squeeze(cch_clipped.magnitude), np.squeeze(
                cch_clipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_clipped.times), np.squeeze(
                cch_clipped_mem.times))
        assert_array_equal(
            np.squeeze(cch_unclipped.magnitude), np.squeeze(
                cch_unclipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_unclipped.times), np.squeeze(
                cch_unclipped_mem.times))
        assert_array_equal(bin_ids_clipped, bin_ids_clipped_mem)
        assert_array_equal(bin_ids_unclipped, bin_ids_unclipped_mem)

        # Check normal correlation. Note: Use numpy correlate to verify result.
        # Note: numpy conventions for input array 1 and input array 2 are
        # swapped compared to Elephant!
        mat1 = self.binned_st1.to_array()[0]
        mat2 = self.binned_st2.to_array()[0]
        target_numpy = np.correlate(mat2, mat1, mode='valid')
        assert_array_equal(
            target_numpy, np.squeeze(cch_unclipped.magnitude))

        # Check correlation using binary spike trains
        mat1 = np.array(self.binned_st1.to_bool_array()[0], dtype=int)
        mat2 = np.array(self.binned_st2.to_bool_array()[0], dtype=int)
        target_numpy = np.correlate(mat2, mat1, mode='valid')
        assert_array_equal(
            target_numpy, np.squeeze(cch_clipped.magnitude))

        # Check the time axis and bin IDs of the resulting AnalogSignal
        assert_array_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_unclipped.times)
        assert_array_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_clipped.times)

        # Check for wrong window parameter setting
        self.assertRaises(
            KeyError, sc.cross_correlation_histogram, self.binned_st1,
            self.binned_st2, window='dsaij')
        self.assertRaises(
            KeyError, sc.cross_correlation_histogram, self.binned_st1,
            self.binned_st2, window='dsaij', method='memory')
Code example #20
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('spike_file')
parser.add_argument('--save')

args = parser.parse_args()
fname = args.spike_file 

data = neo.AsciiSpikeTrainIO(filename=fname)

seg = data.read_segment()
spiketrains = seg.spiketrains

binned = BinnedSpikeTrain(spiketrains, binsize=5 * ms)

corr_matrix = corrcoef(binned)

corr_coefs = np.triu(corr_matrix, 1)
corr_coefs = corr_coefs[corr_coefs != 0]

plt.hist(corr_coefs, 10)
plt.xlabel('correlation coefficient')
plt.ylabel('number of pairs')

if args.save:
    np.savez(args.save,
             corr_coefs=corr_coefs)
else:
    plt.show()
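The excerpt above only shows the argparse import. A plausible set of additional imports that would make it run, assuming quantities-based units and matplotlib for plotting (an assumption, since the original header is not shown):

import matplotlib.pyplot as plt
import numpy as np
import neo
from quantities import ms
from elephant.conversion import BinnedSpikeTrain
from elephant.spike_train_correlation import corrcoef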
Code example #21
for dta, sts in zip(['spinnaker', 'nest'], [sts_spinnaker, sts_nest]):
    for calc_i in range(
            task_starts_idx[job_parameter], task_stop_idx[job_parameter]):
        # save neuron i,j index
        ni = all_combos_unit_i[calc_i]
        nj = all_combos_unit_j[calc_i]

        cc[dta]['unit_i'][calc_i] = ni
        cc[dta]['unit_j'][calc_i] = nj

        print("Correlating %i and %i" % (ni, nj))

        # original pairwise correlation coefficient: bin both trains together
        # so corrcoef returns a 2x2 matrix, then take the off-diagonal entry
        cco = stc.corrcoef(
            conv.BinnedSpikeTrain([sts[ni], sts[nj]], lag_res))[0, 1]
        cc[dta]['original_measure'][calc_i] = cco

        surr_i = elephant.spike_train_surrogates.dither_spikes(
            sts[ni], dither=50. * pq.ms, n=num_surrs)
        surr_j = elephant.spike_train_surrogates.dither_spikes(
            sts[nj], dither=50. * pq.ms, n=num_surrs)

        ccs = []
        ccsm = []
        for surrogate in range(num_surrs):
            scc = stc.corrcoef(conv.BinnedSpikeTrain(
                [surr_i[surrogate], surr_j[surrogate]], lag_res))[0, 1]
            ccs.append(scc)
Code example #22
File: sttc.py Project: Trinity-wang/SPROJ
binned_st1 = BinnedSpikeTrain(neoDataset[0], binsize=1 * ms)
binned_st2 = BinnedSpikeTrain(neoDataset[5], binsize=1 * ms)

cch = cross_correlation_histogram(binned_st1,
                                  binned_st2,
                                  window=[-10, 10],
                                  border_correction=True,
                                  binary=True,
                                  kernel=None)
print(cch)
cchArray = cch[0][:, 0].magnitude.round()
cchArrayTime = cch[0].times.magnitude
cchArrayNP = np.array(cchArray)
print("argmax is:", cchArrayNP.max())

print(corrcoef(x, binary=True))

#calculate the cross-correlograms of the entire dataset,
#produce the corresponding correlation matrix
'''
print(calcCCH(8,9,neoDataset))
correlationArray = generateCorrelationMatrix(neoDataset)
print(correlationArray)
'''
'''
x1 = binned_st1.to_array()
x2 = binned_st2.to_array()
x11 = x1[0]
x21 = x2[0]
y1 = [timestep for timestep in range(len(x11))]
y2 = [timestep for timestep in range(len(x21))]
Code example #23
    def test_cross_correlation_histogram(self):
        '''
        Test generic result of a cross-correlation histogram between two binned
        spike trains.
        '''
        # Calculate CCH using Elephant (normal and binary version) with
        # mode equal to 'full' (whole spike trains are correlated)
        cch_clipped, bin_ids_clipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full',
            binary=True)
        cch_unclipped, bin_ids_unclipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full', binary=False)

        cch_clipped_mem, bin_ids_clipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='full',
            binary=True, method='memory')
        cch_unclipped_mem, bin_ids_unclipped_mem = \
            sc.cross_correlation_histogram(
                self.binned_st1, self.binned_st2, window='full',
                binary=False, method='memory')
        # Check consistency two methods
        assert_array_equal(
            np.squeeze(cch_clipped.magnitude), np.squeeze(
                cch_clipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_clipped.times), np.squeeze(
                cch_clipped_mem.times))
        assert_array_equal(
            np.squeeze(cch_unclipped.magnitude), np.squeeze(
                cch_unclipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_unclipped.times), np.squeeze(
                cch_unclipped_mem.times))
        assert_array_almost_equal(bin_ids_clipped, bin_ids_clipped_mem)
        assert_array_almost_equal(bin_ids_unclipped, bin_ids_unclipped_mem)

        # Check normal correlation. Note: Use numpy correlate to verify result.
        # Note: numpy conventions for input array 1 and input array 2 are
        # swapped compared to Elephant!
        mat1 = self.binned_st1.to_array()[0]
        mat2 = self.binned_st2.to_array()[0]
        target_numpy = np.correlate(mat2, mat1, mode='full')
        assert_array_equal(
            target_numpy, np.squeeze(cch_unclipped.magnitude))

        # Check cross correlation function for several displacements tau
        # Note: Use Elephant corrcoef to verify result
        tau = [-25.0, 0.0, 13.0]  # in ms
        for t in tau:
            # adjust t_start, t_stop to shift by tau
            t0 = np.min([self.st_1.t_start + t * pq.ms, self.st_2.t_start])
            t1 = np.max([self.st_1.t_stop + t * pq.ms, self.st_2.t_stop])
            st1 = neo.SpikeTrain(self.st_1.magnitude + t, units='ms',
                                 t_start=t0 * pq.ms, t_stop=t1 * pq.ms)
            st2 = neo.SpikeTrain(self.st_2.magnitude, units='ms',
                                 t_start=t0 * pq.ms, t_stop=t1 * pq.ms)
            binned_sts = conv.BinnedSpikeTrain([st1, st2],
                                               binsize=1 * pq.ms,
                                               t_start=t0 * pq.ms,
                                               t_stop=t1 * pq.ms)
            # calculate corrcoef
            corrcoef = sc.corrcoef(binned_sts)[1, 0]

            # expand t_stop to have two spike trains with same length as st1,
            # st2
            st1 = neo.SpikeTrain(self.st_1.magnitude, units='ms',
                                 t_start=self.st_1.t_start,
                                 t_stop=self.st_1.t_stop + np.abs(t) * pq.ms)
            st2 = neo.SpikeTrain(self.st_2.magnitude, units='ms',
                                 t_start=self.st_2.t_start,
                                 t_stop=self.st_2.t_stop + np.abs(t) * pq.ms)
            binned_st1 = conv.BinnedSpikeTrain(
                st1, t_start=0 * pq.ms, t_stop=(50 + np.abs(t)) * pq.ms,
                binsize=1 * pq.ms)
            binned_st2 = conv.BinnedSpikeTrain(
                st2, t_start=0 * pq.ms, t_stop=(50 + np.abs(t)) * pq.ms,
                binsize=1 * pq.ms)
            # calculate CCHcoef and take value at t=tau
            CCHcoef, _ = sc.cch(binned_st1, binned_st2,
                                cross_corr_coef=True)
            left_edge = - binned_st1.num_bins + 1
            tau_bin = int(t / float(binned_st1.binsize.magnitude))
            assert_array_equal(
                corrcoef, CCHcoef[tau_bin - left_edge].magnitude)

        # Check correlation using binary spike trains
        mat1 = np.array(self.binned_st1.to_bool_array()[0], dtype=int)
        mat2 = np.array(self.binned_st2.to_bool_array()[0], dtype=int)
        target_numpy = np.correlate(mat2, mat1, mode='full')
        assert_array_equal(
            target_numpy, np.squeeze(cch_clipped.magnitude))

        # Check the time axis and bin IDs of the resulting AnalogSignal
        assert_array_almost_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_unclipped.times)
        assert_array_almost_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_clipped.times)

        # Calculate CCH using Elephant (normal and binary version) with
        # mode equal to 'valid' (only completely overlapping intervals of the
        # spike trains are correlated)
        cch_clipped, bin_ids_clipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=True)
        cch_unclipped, bin_ids_unclipped = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=False)
        cch_clipped_mem, bin_ids_clipped_mem = sc.cross_correlation_histogram(
            self.binned_st1, self.binned_st2, window='valid',
            binary=True, method='memory')
        cch_unclipped_mem, bin_ids_unclipped_mem = \
            sc.cross_correlation_histogram(
                self.binned_st1, self.binned_st2, window='valid',
                binary=False, method='memory')

        # Check consistency two methods
        assert_array_equal(
            np.squeeze(cch_clipped.magnitude), np.squeeze(
                cch_clipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_clipped.times), np.squeeze(
                cch_clipped_mem.times))
        assert_array_equal(
            np.squeeze(cch_unclipped.magnitude), np.squeeze(
                cch_unclipped_mem.magnitude))
        assert_array_equal(
            np.squeeze(cch_unclipped.times), np.squeeze(
                cch_unclipped_mem.times))
        assert_array_equal(bin_ids_clipped, bin_ids_clipped_mem)
        assert_array_equal(bin_ids_unclipped, bin_ids_unclipped_mem)

        # Check normal correlation. Note: Use numpy correlate to verify result.
        # Note: numpy conventions for input array 1 and input array 2 are
        # swapped compared to Elephant!
        mat1 = self.binned_st1.to_array()[0]
        mat2 = self.binned_st2.to_array()[0]
        target_numpy = np.correlate(mat2, mat1, mode='valid')
        assert_array_equal(
            target_numpy, np.squeeze(cch_unclipped.magnitude))

        # Check correlation using binary spike trains
        mat1 = np.array(self.binned_st1.to_bool_array()[0], dtype=int)
        mat2 = np.array(self.binned_st2.to_bool_array()[0], dtype=int)
        target_numpy = np.correlate(mat2, mat1, mode='valid')
        assert_array_equal(
            target_numpy, np.squeeze(cch_clipped.magnitude))

        # Check the time axis and bin IDs of the resulting AnalogSignal
        assert_array_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_unclipped.times)
        assert_array_equal(
            (bin_ids_clipped - 0.5) * self.binned_st1.binsize,
            cch_clipped.times)

        # Check for wrong window parameter setting
        self.assertRaises(
            KeyError, sc.cross_correlation_histogram, self.binned_st1,
            self.binned_st2, window='dsaij')
        self.assertRaises(
            KeyError, sc.cross_correlation_histogram, self.binned_st1,
            self.binned_st2, window='dsaij', method='memory')
Code example #24
File: fp_task6_3.py Project: nikhil-garg/fp-lab
plt.xlim(0, runtime)
plt.xlabel('Time (ms)', fontsize=16)
plt.ylabel('Spike Train Index', fontsize=16)
plt.gca().tick_params(axis='both', which='major', labelsize=14)
plt.savefig('decorr_rasterplot_w{}_k{}.png'.format(w, numInhPerNeuron))
'''

# calculate ISIs and coefficient of variation (CV)

isi_list  = [np.nanmean(isi(spiketrain))       for spiketrain in snglnrn_spikes_neo]
rate_list = [(np.size(spiketrain) / runtime * 1e3) for spiketrain in snglnrn_spikes]
cv_list   = [cv(isi(spiketrain))               for spiketrain in snglnrn_spikes_neo]


train = BinnedSpikeTrain(snglnrn_spikes_neo, binsize=5 * q.ms)
cc_matrix = corrcoef(train, binary=False)

# Cache the matrix
#np.savetxt('cc_matrix.txt', cc_matrix)
#print(np.shape(cc_matrix)) # (192, 192)
#print(cc_matrix)
#plt.plot(cc_matrix)

# Remove the main diagonal
for i in range(192):
    cc_matrix[i][i] = np.nan

# Remove NaN values
cc_matrix = cc_matrix[:,~np.isnan(cc_matrix).all(0)]
cc_matrix = cc_matrix[~np.isnan(cc_matrix).all(1)]
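A more compact way to blank the main diagonal (a sketch, not taken from the original script); np.nanmean then summarises the off-diagonal entries without having to drop whole rows and columns:

import numpy as np

# cc_matrix as computed above: set the main diagonal to NaN in place,
# then average the remaining entries while ignoring NaNs.
np.fill_diagonal(cc_matrix, np.nan)
mean_cc = np.nanmean(cc_matrix)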
Code example #25
# Rates
cc['rate'] = [
    elephant.statistics.mean_firing_rate(st).rescale("Hz").magnitude
    for st in sts]

# CV and LV
isis = [elephant.statistics.isi(st) for st in sts]

cc['cv'] = [elephant.statistics.cv(isi) for isi in isis if len(isi) > 1]
cc['lv'] = [elephant.statistics.lv(isi) for isi in isis if len(isi) > 1]


# original corrcoef
t0 = time.time()
cco = stc.corrcoef(conv.BinnedSpikeTrain(sts, lag_res))
cco_neg = cco * [cco < 0] + [cco > 0]
cco_pos = cco * [cco > 0] + [cco < 0] * np.array([-1])
cc['corr_coeff'] = cco


print('Computed corrcoef')
t1 = time.time()
surr = [elephant.spike_train_surrogates.dither_spike_train(
        st, shift=50. * pq.ms, n=num_surrs) for st in sts]

print('Generated surrogate')
t2 = time.time()
cco_surr = []
for idx_surr in range(num_surrs):
    cco_surr.append(stc.corrcoef(conv.BinnedSpikeTrain([
Code example #26
psth_list = []
for item in bst_sum_list:
    psth = scipy.convolve(gauKern[0], item)
    #plt.figure()
    #plt.plot(psth)
    #plt.axis('tight')
    #plt.xlim(0,3100)
    psth_list.append(psth)
#%%
###cross-trial correlations WITHIN-cell

from elephant.spike_train_correlation import corrcoef

spkt_corr_list = []
for item in binnedst_list:
    stcorr = corrcoef(item, binary=True)
    stcorr_mean = np.mean(stcorr)
    print(stcorr_mean)
    spkt_corr_list.append(stcorr_mean)
    stcorr_list = stcorr.flatten()
    #plt.figure()
    #plt.hist(stcorr_list,bins=200,normed=True,histtype='step')
#%%
"""
12 Dec 2017
The elephant BOX kernel doesn't seem to work; it fails, and I'm not sure
whether the problem is on my end or in the code itself. Honestly, I could just
build the array myself for the most part, since it's just a value of 1 over a
width of 2*delta. Anyway, this first cell shows how to do it on a single
binary binned spike train. The next cell will do it for all simulations.