def test_regression_spiky():
    """Regression test pinning PySpike's distance measures against
    full-precision reference values obtained from SPIKY (the original
    MATLAB implementation)."""
    # standard example: two slightly detuned periodic spike trains
    st1 = SpikeTrain(np.arange(100, 1201, 100), 1300)
    st2 = SpikeTrain(np.arange(100, 1201, 110), 1300)

    isi_dist = spk.isi_distance(st1, st2)
    assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15)
    isi_profile = spk.isi_profile(st1, st2)
    # the piecewise-constant ISI profile is 0.1/1.1 everywhere for this pair
    assert_equal(isi_profile.y, 0.1/1.1 * np.ones_like(isi_profile.y))

    spike_dist = spk.spike_distance(st1, st2)
    # computed floats are compared with a 1e-15 tolerance, consistent with
    # the other regression checks, instead of fragile exact equality
    assert_almost_equal(spike_dist, 2.1105878248735391e-01, decimal=15)

    spike_sync = spk.spike_sync(st1, st2)
    assert_almost_equal(spike_sync, 8.6956521739130432e-01, decimal=15)

    # multivariate check on the shipped test data set; build the path from
    # TEST_PATH (as the other tests in this file do) so the test does not
    # depend on the current working directory
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))

    isi_dist = spk.isi_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15)

    spike_profile = spk.spike_profile_multi(spike_trains)
    # total number of profile support points is pinned
    assert_equal(len(spike_profile.y1) + len(spike_profile.y2), 1252)

    spike_dist = spk.spike_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(spike_dist, 2.4432433330596512e-01, decimal=15)

    spike_sync = spk.spike_sync_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(spike_sync, 0.7183531505298066, decimal=15)
def test_multi_variate_subsets():
    """Selecting spike trains explicitly must give the same multivariate
    distances as passing the index subset to the ``*_multi`` functions."""
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
    selection = [1, 3, 5, 7]
    selected_trains = [spike_trains[idx] for idx in selection]

    # each multivariate measure is checked the same way: explicit list of
    # trains vs. the full list plus an index subset
    for distance_func in (spk.isi_distance_multi,
                          spk.spike_distance_multi,
                          spk.spike_sync_multi):
        from_explicit = distance_func(selected_trains)
        from_indices = distance_func(spike_trains, selection)
        assert_equal(from_explicit, from_indices)
def test_multi_variate_subsets():
    """Selecting spike trains explicitly must give the same multivariate
    distances as passing the index subset to the ``*_multi`` functions.

    NOTE(review): this function name duplicates another definition of
    ``test_multi_variate_subsets`` in this file; Python keeps only the last
    definition, so one of the two copies is silently never collected —
    confirm which one should be kept and remove the other.
    """
    # build the data-file path from TEST_PATH (as the sibling tests do)
    # instead of a hard-coded cwd-relative "test/..." path, so the test
    # works regardless of the directory pytest is launched from
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))

    sub_set = [1, 3, 5, 7]
    spike_trains_sub_set = [spike_trains[i] for i in sub_set]

    v1 = spk.isi_distance_multi(spike_trains_sub_set)
    v2 = spk.isi_distance_multi(spike_trains, sub_set)
    assert_equal(v1, v2)

    v1 = spk.spike_distance_multi(spike_trains_sub_set)
    v2 = spk.spike_distance_multi(spike_trains, sub_set)
    assert_equal(v1, v2)

    v1 = spk.spike_sync_multi(spike_trains_sub_set)
    v2 = spk.spike_sync_multi(spike_trains, sub_set)
    assert_equal(v1, v2)
def test_regression_spiky():
    """Regression test pinning PySpike's results to reference values.

    Comments reference "SPIKY", presumably the original MATLAB tool the
    reference numbers were taken from -- the pinned constants below are
    those full-precision values.
    """
    # standard example: two slightly detuned periodic spike trains
    st1 = SpikeTrain(np.arange(100, 1201, 100), 1300)
    st2 = SpikeTrain(np.arange(100, 1201, 110), 1300)

    isi_dist = spk.isi_distance(st1, st2)
    assert_almost_equal(isi_dist, 9.0909090909090939e-02, decimal=15)
    isi_profile = spk.isi_profile(st1, st2)
    # the ISI profile is constant 0.1/1.1 over the whole interval here
    assert_equal(isi_profile.y, 0.1 / 1.1 * np.ones_like(isi_profile.y))

    spike_dist = spk.spike_distance(st1, st2)
    assert_equal(spike_dist, 0.211058782487353908)

    spike_sync = spk.spike_sync(st1, st2)
    assert_equal(spike_sync, 8.6956521739130432e-01)

    # multivariate check on the shipped test data set
    spike_trains = spk.load_spike_trains_from_txt(
        os.path.join(TEST_PATH, "PySpike_testdata.txt"), (0.0, 4000.0))
    isi_dist = spk.isi_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(isi_dist, 0.17051816816999129656, decimal=15)

    spike_profile = spk.spike_profile_multi(spike_trains)
    # pinned total number of profile support points
    assert_equal(len(spike_profile.y1) + len(spike_profile.y2), 1252)

    spike_dist = spk.spike_distance_multi(spike_trains)
    # get the full precision from SPIKY
    assert_almost_equal(spike_dist, 0.25188056475463755, decimal=15)

    spike_sync = spk.spike_sync_multi(spike_trains)
    # get the full precision from SPIKY
    assert_equal(spike_sync, 0.7183531505298066)

    # Eero's edge correction example: two mirror-symmetric trains on [0, 6]
    st1 = SpikeTrain([0.5, 1.5, 2.5], 6.0)
    st2 = SpikeTrain([3.5, 4.5, 5.5], 6.0)
    f = spk.spike_profile(st1, st2)

    expected_times = np.array([0.0, 0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.0])
    # interleaved left/right profile values at the support points;
    # symmetric because the two trains mirror each other
    y_all = np.array([
        0.271604938271605, 0.271604938271605, 0.271604938271605,
        0.617283950617284, 0.617283950617284, 0.444444444444444,
        0.285714285714286, 0.285714285714286, 0.444444444444444,
        0.617283950617284, 0.617283950617284, 0.271604938271605,
        0.271604938271605, 0.271604938271605
    ])
    # de-interleave into left-limit (y1) and right-limit (y2) values
    expected_y1 = y_all[::2]
    expected_y2 = y_all[1::2]

    assert_equal(f.x, expected_times)
    assert_array_almost_equal(f.y1, expected_y1, decimal=14)
    assert_array_almost_equal(f.y2, expected_y2, decimal=14)
def test_regression_random():
    """Check multivariate distances on random spike-train sets against
    reference results computed by cSPIKY, and check that each distance
    agrees with the average of its corresponding profile."""
    spike_train_sets = loadmat(
        "test/numeric/regression_random_spikes.mat")["spikes"][0]
    results_cSPIKY = loadmat(
        "test/numeric/regression_random_results_cSPIKY.mat")["Distances"]

    for idx, train_data in enumerate(spike_train_sets):
        trains = [spk.SpikeTrain(spikes.flatten(), 100.0)
                  for spikes in train_data[0]]

        isi = spk.isi_distance_multi(trains)
        isi_prof = spk.isi_profile_multi(trains).avrg()
        spike = spk.spike_distance_multi(trains)
        spike_prof = spk.spike_profile_multi(trains).avrg()
        spike_sync = spk.spike_sync_multi(trains)
        spike_sync_prof = spk.spike_sync_profile_multi(trains).avrg()

        # both the direct distance and the averaged profile must match the
        # cSPIKY reference value for this spike-train set
        assert_almost_equal(isi, results_cSPIKY[idx][0], decimal=14,
                            err_msg="Index: %d, ISI" % idx)
        assert_almost_equal(isi_prof, results_cSPIKY[idx][0], decimal=14,
                            err_msg="Index: %d, ISI" % idx)

        assert_almost_equal(spike, results_cSPIKY[idx][1], decimal=14,
                            err_msg="Index: %d, SPIKE" % idx)
        assert_almost_equal(spike_prof, results_cSPIKY[idx][1], decimal=14,
                            err_msg="Index: %d, SPIKE" % idx)

        # no cSPIKY reference for SPIKE-Sync; check internal consistency
        assert_almost_equal(spike_sync, spike_sync_prof, decimal=14,
                            err_msg="Index: %d, SPIKE-Sync" % idx)
def check_regression_dataset(spike_file="benchmark.mat",
                             spikes_name="spikes",
                             result_file="results_cSPIKY.mat",
                             result_name="Distances"):
    """Debugging function: compare ISI and SPIKE distances on every
    spike-train set in *spike_file* against the cSPIKY reference values in
    *result_file*, printing offending sets and the worst SPIKE-distance
    error found.  Not collected by pytest (no ``test_`` prefix).

    :param spike_file: .mat file holding the spike-train sets
    :param spikes_name: variable name of the spike data inside spike_file
    :param result_file: .mat file holding the cSPIKY reference distances
    :param result_name: variable name of the distances inside result_file
    """
    # full precision so printed discrepancies are directly comparable
    np.set_printoptions(precision=15)

    spike_train_sets = loadmat(spike_file)[spikes_name][0]
    results_cSPIKY = loadmat(result_file)[result_name]

    # track the largest SPIKE-distance deviation and how often it exceeds
    # the 1e-14 tolerance
    err_max = 0.0
    err_max_ind = -1
    err_count = 0

    for i, spike_train_data in enumerate(spike_train_sets):
        spike_trains = []
        for spikes in spike_train_data[0]:
            spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0))

        isi = spk.isi_distance_multi(spike_trains)
        spike = spk.spike_distance_multi(spike_trains)
        # spike_sync = spk.spike_sync_multi(spike_trains)

        # ISI mismatches are reported immediately, including the raw trains
        if abs(isi - results_cSPIKY[i][0]) > 1E-14:
            print("Error in ISI:", i, isi, results_cSPIKY[i][0])
            print("Spike trains:")
            for st in spike_trains:
                print(st.spikes)

        err = abs(spike - results_cSPIKY[i][1])
        if err > 1E-14:
            err_count += 1
            if err > err_max:
                err_max = err
                err_max_ind = i

    print("Total Errors:", err_count)

    if err_max_ind > -1:
        # dump the worst-offending spike-train set for closer inspection
        print("Max SPIKE distance error:", err_max, "at index:", err_max_ind)
        spike_train_data = spike_train_sets[err_max_ind]
        for spikes in spike_train_data[0]:
            print(spikes.flatten())
def check_single_spike_train_set(index):
    """Debugging function: print the spikes, the computed multivariate SPIKE
    distance, and the cSPIKY reference value for the spike-train set at
    position *index* in the regression data.  Not collected by pytest
    (no ``test_`` prefix).

    :param index: index of the spike-train set within the .mat file
    """
    # full precision so printed values can be compared digit-by-digit
    np.set_printoptions(precision=15)

    # NOTE(review): paths here have no "test/numeric/" prefix (unlike
    # test_regression_random), so this helper presumably expects to be run
    # from inside that directory -- confirm before relying on it.
    spike_file = "regression_random_spikes.mat"
    spikes_name = "spikes"
    result_name = "Distances"
    result_file = "regression_random_results_cSPIKY.mat"

    spike_train_sets = loadmat(spike_file)[spikes_name][0]
    results_cSPIKY = loadmat(result_file)[result_name]

    spike_train_data = spike_train_sets[index]
    spike_trains = []
    for spikes in spike_train_data[0]:
        print("Spikes:", spikes.flatten())
        spike_trains.append(spk.SpikeTrain(spikes.flatten(), 100.0))

    # computed value vs. cSPIKY reference
    print(spk.spike_distance_multi(spike_trains))
    print(results_cSPIKY[index][1])
    print(spike_trains[1].spikes)
edges=(0, 4000)) t_loading = time.clock() print("Number of spike trains: %d" % len(spike_trains)) num_of_spikes = sum([len(spike_trains[i].spikes) for i in xrange(len(spike_trains))]) print("Number of spikes: %d" % num_of_spikes) # calculate the multivariate spike distance f = spk.spike_profile_multi(spike_trains) t_spike = time.clock() # print the average avrg = f.avrg() print("Spike distance from average: %.8f" % avrg) t_avrg = time.clock() # compute average distance directly, should give the same result as above spike_dist = spk.spike_distance_multi(spike_trains) print("Spike distance directly: %.8f" % spike_dist) t_dist = time.clock() print("Loading: %9.1f ms" % time_diff_in_ms(t_start, t_loading)) print("Computing profile: %9.1f ms" % time_diff_in_ms(t_loading, t_spike)) print("Averaging: %9.1f ms" % time_diff_in_ms(t_spike, t_avrg)) print("Computing distance: %9.1f ms" % time_diff_in_ms(t_avrg, t_dist)) print("Total: %9.1f ms" % time_diff_in_ms(t_start, t_dist))