def testEpochsFromSpiketrain(self):
    """test for epoch generation from a spiketrain"""
    st = sp.array([50, 100, 150])
    cut = (5, 5)
    st_ep = sp.array([
        [45, 55],
        [95, 105],
        [145, 155],
    ])
    assert_equal(epochs_from_spiketrain(st, cut), st_ep)
    assert_equal(epochs_from_spiketrain(st, cut, end=150), st_ep[:-1])
    assert_equal(
        epochs_from_spiketrain(st, cut),
        epochs_from_spiketrain(st, sum(cut)))
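# For illustration only: a minimal sketch of the epoch-generation behaviour the
# test above asserts, assuming a scalar `cut` is split symmetrically around each
# spike time and that `end` drops epochs reaching past the data range. The name
# `_epochs_from_spiketrain_sketch` is hypothetical and this is not the library's
# implementation of epochs_from_spiketrain; numpy is used here in place of the
# scipy-as-sp aliasing of the surrounding module.
import numpy as np

def _epochs_from_spiketrain_sketch(st, cut, end=None):
    if not isinstance(cut, (tuple, list)):
        # split a scalar cut symmetrically around the spike time
        cut = (int(cut) // 2, int(cut) - int(cut) // 2)
    st = np.asarray(st)
    # one [t - cut[0], t + cut[1]] epoch per spike time t
    ep = np.vstack((st - cut[0], st + cut[1])).T
    if end is not None:
        # keep only epochs that end within the data range
        ep = ep[ep[:, 1] <= end]
    return ep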
print "retrieving multiunit spike set @tf=%d" % tf spks_info = [] spks = [] for id_trl in trial_ids: trial_st = None try: trial_st = db.get_unit_data(id_mu, id_trl)["spiketrain"] if trial_st.size == 0: print "\tno spiketrain for %s" % db.get_fname_for_id(id_trl) continue trial_spks, trial_st = get_aligned_spikes( data[id_trl], trial_st, tf, align_at=align_at, mc=False, kind="min" ) end = data[id_trl].shape[0] nep = epochs_from_spiketrain(trial_st, tf, end=end) nep = invert_epochs(nep, end=end) nep = merge_epochs(nep) ndet.update(data[id_trl], epochs=nep) spks.append(trial_spks) spks_info.append(sp.vstack([[id_trl] * trial_st.size, trial_st]).T) print "\tprocessed %s" % db.get_fname_for_id(id_trl) except Exception, e: raise RuntimeError("error processing %s\n%s" % (db.get_fname_for_id(id_trl), e)) finally: del trial_st spks_info = sp.vstack(spks_info) spks = sp.vstack(spks) print "found %d spikes in total" % spks.shape[0] print "done."