def generate_spike_train_and_epoch():
    """Build a synthetic spike train plus a matching stimulus epoch.

    Each of the 100 trials contains a baseline 2 Hz Poisson process,
    an elevated 8 Hz response window starting ``stim_latency`` after
    stimulus onset, and a return to baseline until trial end.

    Returns
    -------
    spike_train : neo.SpikeTrain
        All trials concatenated into one train.
    epoch : neo.Epoch
        Stimulus onset times and durations, one entry per trial.
    """
    # NOTE: only the helpers actually used are imported here; the
    # original also pulled in `salt`/`generate_salt_trials` unused.
    from exana.misc import concatenate_spiketrains
    from elephant.spike_train_generation import homogeneous_poisson_process as hpp
    np.random.seed(12345)  # deterministic output for reproducible tests
    N_trials = 100
    stim_duration = 100 * pq.ms
    stim_start = 1000 * pq.ms
    stim_latency = 50 * pq.ms
    trial_duration = 1500 * pq.ms
    trains = []
    stim_onsets = []
    for n in range(N_trials):
        offset = trial_duration * n
        stim_onsets.append(stim_start + offset)
        # Three contiguous segments per trial: baseline, elevated
        # response window, baseline again until the trial ends.
        trains.extend([
            hpp(rate=2 * pq.Hz, t_start=offset,
                t_stop=stim_start + stim_latency + offset),
            hpp(rate=8 * pq.Hz, t_start=stim_start + stim_latency + offset,
                t_stop=stim_start + stim_duration + offset),
            hpp(rate=2 * pq.Hz, t_start=stim_start + stim_duration + offset,
                t_stop=trial_duration + offset)])
    spike_train = concatenate_spiketrains(trains)
    epoch = neo.Epoch(
        times=np.array(stim_onsets) * pq.ms,
        durations=np.array([stim_duration] * len(stim_onsets)) * pq.ms)
    return spike_train, epoch
def test_concatenate_spiketrains():
    """concatenate_spiketrains must join times/waveforms in order,
    keep units and t_stop, and reject trains with mismatched units."""
    from exana.misc import concatenate_spiketrains
    wf_first = np.ones((10, 1, 5)) * pq.V
    wf_second = np.ones((15, 1, 5)) * pq.V
    first = neo.SpikeTrain(times=np.arange(10), t_stop=10, units='s',
                           waveforms=wf_first)
    second = neo.SpikeTrain(times=np.arange(10, 25), t_stop=25, units='s',
                            waveforms=wf_second)
    merged = concatenate_spiketrains([first, second])
    expected_times = np.concatenate((np.arange(10), np.arange(10, 25)))
    expected_waveforms = np.concatenate((np.ones((10, 1, 5)),
                                         np.ones((15, 1, 5))))
    assert np.array_equal(merged.times.magnitude, expected_times)
    assert merged.times.units == pq.s.units
    assert np.array_equal(merged.waveforms.magnitude, expected_waveforms)
    assert merged.waveforms.units == pq.V
    assert merged.t_stop == 25 * pq.s
    # A millisecond-based train mixed with a second-based one must raise.
    with pytest.raises(ValueError):
        ms_train = neo.SpikeTrain(times=np.arange(10, 25), t_stop=25,
                                  units='ms', waveforms=wf_second)
        concatenate_spiketrains([first, ms_train])
def test_baysian():
    """Smoke test: run baysian_latency on a peri-stimulus count histogram.

    Generates 100 trials with a rate step from 2 Hz to 8 Hz, bins the
    resulting trials into a 100-bin time histogram, and feeds the counts
    to ``baysian_latency``.

    Returns
    -------
    count_data : np.ndarray
        The binned spike counts passed to the model.
    trace : object
        Whatever ``baysian_latency`` returns (sampling trace).
    """
    # All imports gathered up front; the original scattered them through
    # the body and imported `generate_salt_trials` without using it.
    from exana.stimulus import baysian_latency, make_spiketrain_trials
    from exana.misc import concatenate_spiketrains
    from elephant.spike_train_generation import homogeneous_poisson_process as hpp
    from elephant.statistics import time_histogram
    np.random.seed(12345)  # deterministic trains for reproducibility
    N_trials = 100
    stim_duration = 100 * pq.ms
    stim_start = 1000 * pq.ms
    stim_latency = 50 * pq.ms
    trial_duration = 1150 * pq.ms
    trains = []
    stim_onsets = []
    for n in range(N_trials):
        offset = trial_duration * n
        stim_onsets.append(offset)
        # Two segments per trial: baseline rate, then elevated rate
        # from (stim_start + stim_latency) until the end of the trial.
        trains.extend([
            hpp(rate=2 * pq.Hz, t_start=offset,
                t_stop=stim_start + stim_latency + offset),
            hpp(rate=8 * pq.Hz, t_start=stim_start + stim_latency + offset,
                t_stop=stim_start + stim_duration + offset)])
    spike_train = concatenate_spiketrains(trains)
    epoch = neo.Epoch(
        times=np.array(stim_onsets) * pq.ms,
        durations=np.array([trial_duration] * len(stim_onsets)) * pq.ms)
    trials = make_spiketrain_trials(spike_train=spike_train, epoch=epoch)
    t_start = trials[0].t_start.rescale('s')
    t_stop = trials[0].t_stop.rescale('s')
    # 100 equal bins spanning the full trial window.
    binsize = (abs(t_start) + abs(t_stop)) / float(100)
    time_hist = time_histogram(trials, binsize, t_start=t_start,
                               t_stop=t_stop, output='counts', binary=False)
    count_data = time_hist.magnitude
    trace = baysian_latency(count_data)
    return count_data, trace
def test_salt_exc():
    """SALT must flag the excitatory response at the injected latency.

    Uses the shared ``generate_spike_train_and_epoch`` fixture (which
    seeds NumPy and builds identical trials to the code this replaces)
    and asserts that the first SALT window with p < 0.01 coincides with
    the 50 ms latency baked into the fixture.

    Returns
    -------
    tuple
        (baseline_trials, test_trials, spike_train, epoch) for reuse.
    """
    from exana.stimulus import salt, generate_salt_trials
    # Must match the latency used inside generate_spike_train_and_epoch.
    stim_latency = 50 * pq.ms
    # Reuse the module fixture instead of duplicating the generation
    # code; the original body was a byte-for-byte copy of it.
    spike_train, epoch = generate_spike_train_and_epoch()
    baseline_trials, test_trials = generate_salt_trials(spike_train, epoch)
    latencies, p_values, I_values = salt(baseline_trials=baseline_trials,
                                         test_trials=test_trials,
                                         winsize=0.01 * pq.s,
                                         latency_step=0.01 * pq.s)
    # Earliest significant window should be exactly the injected latency.
    idxs, = np.where(np.array(p_values) < 0.01)
    assert latencies[min(idxs)] == stim_latency
    return baseline_trials, test_trials, spike_train, epoch