def test_mean_firing_rate_typical_use_case(self):
    """Explicit t_start/t_stop equal to the train's own bounds must give
    the same rate (and units) as the no-argument default."""
    np.random.seed(92)
    spiketrain = homogeneous_poisson_process(rate=100 * pq.Hz, t_stop=100 * pq.s)
    rate_default = statistics.mean_firing_rate(spiketrain)
    rate_explicit = statistics.mean_firing_rate(
        spiketrain, t_start=spiketrain.t_start, t_stop=spiketrain.t_stop)
    self.assertEqual(rate_default.units, rate_explicit.units)
    self.assertAlmostEqual(rate_default.item(), rate_explicit.item())
def test_mean_firing_rate_with_spiketrain_set_ends(self):
    """Rate over an explicit [0.4, 0.9] ms window: 2 spikes / 0.5 ms."""
    train = neo.SpikeTrain(self.test_array_1d, units='ms', t_stop=10.0)
    expected = pq.Quantity(2 / 0.5, '1/ms')
    observed = statistics.mean_firing_rate(
        train, t_start=0.4 * pq.ms, t_stop=0.9 * pq.ms)
    assert_array_almost_equal(observed, expected, decimal=9)
def spike_statistics(idx, row):
    """Compute summary spike statistics for one simulation output file.

    Parameters
    ----------
    idx : any
        Row index, printed for progress tracking.
    row : mapping
        Must contain "output_file", the path of a Neo-readable file whose
        first block's first segment holds the spike trains.

    Returns
    -------
    dict with keys "spike_counts" (total spikes), "firing_rate"
    (mean rate in 1/s) and "cv_isi" (mean CV of inter-spike intervals,
    0 when no train has more than one spike).
    """
    from elephant.statistics import mean_firing_rate, cv, isi
    from elephant.conversion import BinnedSpikeTrain
    from elephant.spike_train_correlation import corrcoef
    print(idx)
    results = {}
    # read spike trains from file
    io = get_io(row["output_file"])
    try:
        # fix: close the IO handle even if reading/statistics raise;
        # the original leaked it on any exception before io.close()
        data_block = io.read()[0]
        spiketrains = data_block.segments[0].spiketrains
        # total spike count and mean firing rate across all trains
        results["spike_counts"] = sum(st.size for st in spiketrains)
        rates = [mean_firing_rate(st) for st in spiketrains]
        results["firing_rate"] = Quantity(
            rates, units=rates[0].units).rescale("1/s").mean()
        # coefficient of variation of the inter-spike interval;
        # trains with <2 spikes have no ISI and are skipped
        cvs = [cv(isi(st)) for st in spiketrains if st.size > 1]
        results["cv_isi"] = sum(cvs) / len(cvs) if cvs else 0
        # calculate global cross-correlation (disabled)
        # cc_matrix = corrcoef(BinnedSpikeTrain(spiketrains, binsize=5*ms))
        # results["cc_min"] = cc_matrix.min()
        # results["cc_max"] = cc_matrix.max()
        # results["cc_mean"] = cc_matrix.mean()
    finally:
        io.close()
    return results
def compute_elephant_mean_firing_rate(self, spikes_times, **elephant_kwargs):
    """Compute the mean (across time) rate from an input of spikes' events
    or spikes' times using elephant.statistics.mean_firing_rate.

    Arguments:
     - spikes_times: a neo.core.SpikeTrain or an array of spikes' times
       or a dict with a key-value pair of "times" and spikes' times array
     - elephant_kwargs: forwarded to mean_firing_rate; "t_start"/"t_stop"
       default to the spike-train bounds padded by self._fmin_resolution
       (interpreted in ms)
    Returns:
     - the mean rate (a quantity; plain 0.0 for an empty train)
     - the neo.core.SpikeTrain used for the computation
    """
    from quantities import ms
    from elephant.statistics import mean_firing_rate
    t_start, t_stop = self._assert_start_end_times_from_spikes_times(
        spikes_times)
    spikes_train = self._assert_spike_train(spikes_times)
    # fix: attach ms units to the *defaults* only. The original applied
    # "* ms" to the looked-up value unconditionally, so a caller-supplied
    # t_start/t_stop quantity got its units squared (ms -> ms**2).
    # NOTE(review): assumes t_start/t_stop from the helper are bare floats
    # in ms — confirm against _assert_start_end_times_from_spikes_times.
    elephant_kwargs.setdefault(
        "t_start", (t_start - self._fmin_resolution) * ms)
    elephant_kwargs.setdefault(
        "t_stop", (t_stop + self._fmin_resolution) * ms)
    if len(spikes_train):
        return mean_firing_rate(spikes_train, **elephant_kwargs), spikes_train
    return 0.0, spikes_train
def generate_prediction(self, model, **kwargs):
    """Return cached per-train mean firing rates (Hz) for *model*,
    computing and caching them on first use. Extra kwargs update
    self.params before spike trains are produced."""
    cached = self.get_prediction(model)
    if cached is not None:
        return cached
    if kwargs:
        self.params.update(kwargs)
    trains = model.produce_spiketrains(**self.params)
    prediction = [mean_firing_rate(train).rescale('Hz') for train in trains]
    self.set_prediction(model, prediction)
    return prediction
def test_mean_firing_rate_with_plain_array_2d_1_set_ends(self):
    """Plain 2-D input with axis=1 and explicit ends stays unitless."""
    data = self.test_array_2d
    expected = np.array([4, 1, 3]) / (1.23 - 0.14)
    observed = statistics.mean_firing_rate(
        data, axis=1, t_start=0.14, t_stop=1.23)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def spikes_to_rate( spikes,t_start,t_stop, windows=0.0):
    """
    Compute the mean firing rate of one spike train, or of multiple spike
    trains, either over the whole [t_start, t_stop] interval or per window.

    WARNING: function currently unused, kept as a reference implementation.
    TODO: add support for overlapping windows.

    :param spikes: one spike train or a sequence of spike trains
    :param t_start: time at which to start computing the rate (quantity)
    :param t_stop: time at which to stop computing the rate (quantity)
    :param windows: window length for a time-resolved rate; 0.0 means a
                    single rate over the whole interval
    :return: array of rates (windows == 0.0) or array of per-window rates
    """
    if windows == 0.0:
        # case without variation of rate: one value per spike train
        # NOTE(review): `len(spikes[0].shape) == 0` distinguishes a single
        # train (first element is a scalar time) from a list of trains —
        # confirm this matches the caller's data layout.
        if len(spikes[0].shape) ==0:
            # with only one spike train -> single rate
            result = [mean_firing_rate(spiketrain=spikes,t_start=t_start,t_stop=t_stop).rescale(Hz)]
        else:
            # with multiple spike trains -> one rate each
            result = []
            for spike in spikes:
                result.append(mean_firing_rate(spiketrain=spike,t_start=t_start,t_stop=t_stop).rescale(Hz))
        return np.array(result)
    else:
        # case with variation of rate: one rate per non-overlapping window
        rate = []
        for time in np.arange(t_start,t_stop,windows):
            # NOTE(review): np.arange strips the quantity units, so they are
            # re-attached here from t_start; verify windows shares those units.
            t_start_window = time*t_start.units
            # NOTE(review): `windows` is added without units — presumably it
            # is a float in t_start's units; confirm against callers.
            t_stop_window = t_start_window+windows
            if len(spikes[0].shape) == 0:
                # for only one spike train
                result = [mean_firing_rate(spiketrain=spikes, t_start=t_start_window, t_stop=t_stop_window).rescale(Hz)]
            else:
                # for multiple spike trains
                result = []
                for spike in spikes:
                    result.append(mean_firing_rate(spiketrain=spike, t_start=t_start_window, t_stop=t_stop_window).rescale(Hz))
            rate.append(result)
        return np.array(rate)
def calculate_neuron_mfr_elephant(col, num_mins_per_bin, total_time):
    """Compute the mean firing rate of one neuron in consecutive time bins.

    Parameters
    ----------
    col : pandas column whose index holds spike timestamps; non-null
        entries mark spikes.
    num_mins_per_bin : bin width, in the same unit as total_time.
    total_time : total recording span; total_time / num_mins_per_bin
        gives the number of bins.

    Returns
    -------
    pd.Series of length num_bins with one mean firing rate per bin
    (NaN for bins without spikes), rescaled by 10**10.
    """
    # fix: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int has identical truncation behavior here.
    num_bins = int(total_time / num_mins_per_bin)
    col_bins = np.array_split(col, num_bins)
    mfrs = pd.Series(np.zeros(num_bins))
    for ind, col_bin in enumerate(col_bins):
        spike_times = pd.to_numeric(col_bin[col_bin.notnull()].index.values)
        try:
            # empty bin -> spike_times[-1] raises IndexError, caught below
            spike_train = SpikeTrain(times=spike_times,
                                     t_stop=spike_times[-1], units=ns)
            mfr = mean_firing_rate(spike_train)
        except IndexError:
            mfr = np.nan
        mfrs[ind] = mfr
    # NOTE(review): presumably a 1/ns -> 1/s-style rescaling; a strict
    # ns->s conversion would be 10**9 — confirm the intended factor.
    mfrs *= 10**10
    return mfrs
def compute_spontan_rate(chxs, stim_off_epoch):
    # TODO: test
    '''
    Calculates the spontaneous firing rate of each non-noise unit,
    averaged over trials cut from the stimulus-off epoch.

    Parameters
    ----------
    chxs : list
        list of neo.core.ChannelIndex
    stim_off_epoch : neo.core.Epoch
        stimulus epoch

    Returns
    -------
    out : defaultdict(dict)
        rates[channel_index_name][unit_id] = spontaneous rate
    '''
    from collections import defaultdict
    from elephant.statistics import mean_firing_rate
    rates = defaultdict(dict)
    unit_rates = pq.Hz
    for chx in chxs:
        for unit in chx.units:
            # units without a cluster_group annotation are treated as noise
            group = unit.annotations.get('cluster_group') or 'noise'
            if group.lower() == "noise":
                continue
            sptr = unit.spiketrains[0]
            unit_id = unit.annotations["cluster_id"]
            trials = make_spiketrain_trials(
                epoch=stim_off_epoch,
                t_start=0 * pq.s,
                t_stop=stim_off_epoch.durations,
                spike_train=sptr)
            # average the per-trial mean firing rates
            trial_rates = [mean_firing_rate(trial, trial.t_start, trial.t_stop)
                           for trial in trials]
            rates[chx.name][unit_id] = sum(trial_rates, 0 * unit_rates) / len(trials)
    return rates
def compute_orientation_tuning(orient_trials):
    '''
    Calculates the mean firing rate for each orientation

    Parameters
    ----------
    orient_trials : collections.OrderedDict
        OrderedDict with orients as keys and trials as values.

    Returns
    -------
    rates : quantity array
        average rates
    orients : quantity array
        sorted stimulus orientations
    '''
    # fix: imports moved below the docstring — the original put them first,
    # which made the triple-quoted string a dead expression instead of the
    # function's docstring.
    from exana.stimulus.tools import (make_orientation_trials,
                                      _convert_string_to_quantity_scalar)
    from elephant.statistics import mean_firing_rate
    unit_orients = pq.deg
    unit_rates = pq.Hz
    orient_count = len(orient_trials)
    rates = np.zeros((orient_count)) * unit_rates
    orients = np.zeros((orient_count)) * unit_orients
    for i, (orient, trials) in enumerate(orient_trials.items()):
        # keys are stored as strings; convert back to a quantity scalar
        orient = _convert_string_to_quantity_scalar(orient)
        rate = 0 * unit_rates
        for trial in trials:
            rate += mean_firing_rate(trial, trial.t_start, trial.t_stop)
        rates[i] = rate / len(trials)
        orients[i] = orient.rescale(unit_orients)
    return rates, orients
def test_mean_firing_rate_with_plain_array_2d_None(self):
    """A plain 2-D array with axis=None yields a unitless result."""
    data = self.test_array_2d
    expected = self.targ_array_2d_None / self.max_array_2d_None
    observed = es.mean_firing_rate(data, axis=None)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_plain_array_2d_1_set_ends(self):
    """Plain 2-D input, axis=1, explicit window: unitless per-row rates."""
    data = self.test_array_2d
    expected = np.array([4, 1, 3]) / (1.23 - 0.14)
    observed = es.mean_firing_rate(data, axis=1, t_start=0.14, t_stop=1.23)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_plain_array_3d_2(self):
    """Plain 3-D input, axis=2, t_stop=5: counts along axis 2 over 5."""
    data = self.test_array_3d
    expected = np.sum(data, axis=2) / 5.0
    observed = es.mean_firing_rate(data, axis=2, t_stop=5.)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_plain_array_1d_set_ends(self):
    """Plain 1-D input with an explicit [0.3, 1.23] window stays unitless."""
    data = self.test_array_1d
    expected = self.targ_array_1d / (1.23 - 0.3)
    observed = es.mean_firing_rate(data, t_start=0.3, t_stop=1.23)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def statistic_mean_firing_rates(self, spiketrain, t_start=None, t_stop=None,
                                axis=None):
    """Mean firing rate of *spiketrain* via elephant's mean_firing_rate.

    Parameters mirror elephant.statistics.mean_firing_rate: optional
    t_start/t_stop bound the counting window, axis selects the axis
    for array input.

    Returns the computed rate.
    """
    # fix: the original discarded the result and always returned None
    return mean_firing_rate(spiketrain, t_start, t_stop, axis)
def test_mean_firing_rate_with_quantities_1d_set_ends(self):
    """Quantity input with mixed-unit window ends (us start, bare stop)."""
    train = pq.Quantity(self.test_array_1d, units='ms')
    expected = pq.Quantity(2 / 0.6, '1/ms')
    observed = es.mean_firing_rate(train, t_start=400 * pq.us, t_stop=1.)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_spiketrain(self):
    """SpikeTrain input with defaults: counts over the full 10 ms span."""
    train = neo.SpikeTrain(self.test_array_1d, units='ms', t_stop=10.0)
    expected = pq.Quantity(self.targ_array_1d / 10., '1/ms')
    observed = es.mean_firing_rate(train)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_plain_array_2d_default(self):
    """Plain 2-D input with default arguments stays unitless."""
    data = self.test_array_2d
    expected = self.targ_array_2d_default / self.max_array_2d_default
    observed = statistics.mean_firing_rate(data)
    assert not isinstance(observed, pq.Quantity)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_spiketrain_set_ends(self):
    """SpikeTrain input with bare-float window ends: 2 spikes / 0.5 ms."""
    train = neo.SpikeTrain(self.test_array_1d, units='ms', t_stop=10.0)
    expected = pq.Quantity(2 / 0.5, '1/ms')
    observed = es.mean_firing_rate(train, t_start=0.4, t_stop=0.9)
    assert_array_almost_equal(observed, expected, decimal=9)
def test_mean_firing_rate_with_quantities_1d(self):
    """Quantity input with defaults: rate carries 1/ms units."""
    train = pq.Quantity(self.test_array_1d, units='ms')
    expected = pq.Quantity(self.targ_array_1d / self.max_array_1d, '1/ms')
    observed = es.mean_firing_rate(train)
    assert_array_almost_equal(observed, expected, decimal=9)
# Python 2 script fragment (note the `print` statement). Relies on names
# defined earlier in the file and not visible here: plt, os, np, pd, pq,
# file_name, neospiketrain_list, del_half_neospiketrain_list.
plt.show()
os.chdir('Analysis/Noise_spike_rasters')
#plt.savefig('Noise_raster_'+file_name+'.png', bbox_inches='tight')
#plt.close()
os.chdir('../..')
# NOTE(review): missing space before 'saved' in the printed message
print 'Spike raster for ' + file_name + 'saved'
#%%
# doing FI curve
# finding mean firing rate for entire sweep
from elephant.statistics import mean_firing_rate
firing_rate_list = []
for spiketrain in neospiketrain_list:
    # trains are rescaled to seconds, so the bare floats 0.5/1.5 are
    # presumably meant as seconds — TODO confirm elephant accepts them
    temp_firing_rate = mean_firing_rate(spiketrain.rescale(pq.s),
                                        t_start=0.5, t_stop=1.5)
    firing_rate_list.append(np.asarray(temp_firing_rate))
# %% conversion of discrete spike times to binary counts
from elephant.conversion import BinnedSpikeTrain
# bin the trains at 1 ms resolution over the 700-3200 ms window
bst_list = BinnedSpikeTrain(del_half_neospiketrain_list,
                            binsize=1.0 * pq.ms,
                            t_start=700.0 * pq.ms,
                            t_stop=3200.0 * pq.ms)
bst_arr = bst_list.to_array()  # export binned spike times to an array
bst_df = pd.DataFrame(bst_arr).T  # turn into a df and transpose (.T)
bst_sum = bst_df.apply(np.sum, axis=1)  # sum by row across columns
# plt.figure()