def test_read_nse_data(self):
    """Compare spike times and waveforms loaded by read_nse against the
    plain-text reference export of each .nse file."""
    t_start, t_stop = None, None  # in samples
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    for el_id, el_dict in nio.parameters_nse.items():
        recording_path = nio.parameters_nse[el_id]['recording_file_name']
        # basename without directory (either separator style) or extension
    # NOTE: split chain handles both '/' and '\\' separators
        stem = recording_path.split('/')[-1].split('\\')[-1].split('.')[0]
        nio.read_nse(stem, seg, t_start=t_start, t_stop=t_stop,
                     waveforms=True)
        st = seg.filter({'electrode_id': el_id}, objects=SpikeTrain)[0]
        # parse the reference dump, stripping a possible UTF-8 BOM per line
        with open(self.pd + '/%s.txt' % stem) as reference:
            rows = [ln.strip('\xef\xbb\xbf').split() for ln in reference]
        rows = np.asarray(rows, dtype=float)
        # column 0 holds timestamps, columns 11+ the waveform samples;
        # the final record is excluded, as in the reader's output
        expected_waveforms = rows[:-1, 11:]
        expected_times = rows[:-1, 0] * pq.microsecond
        expected_times = expected_times - nio.parameters_global['t_start']
        np.testing.assert_array_equal(expected_times.magnitude, st.magnitude)
        np.testing.assert_array_equal(expected_waveforms, st.waveforms)
def test_read_nse_data(self):
    """Check spike times and waveforms loaded by read_nse against the
    plain-text reference export of each .nse file.

    For every electrode listed in ``parameters_nse`` the matching
    ``<name>.txt`` dump is parsed; column 0 holds the timestamp in
    microseconds, columns 11 onward the waveform samples.
    """
    t_start, t_stop = None, None  # in samples
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    # BUGFIX: dict.iteritems() was removed in Python 3 -- items() works
    # on both Python 2 and 3.
    for el_id, el_dict in nio.parameters_nse.items():
        filepath = nio.parameters_nse[el_id]['recording_file_name']
        # basename without directory (either separator style) or extension
        filename = filepath.split('/')[-1].split('\\')[-1].split('.')[0]
        nio.read_nse(filename, seg, t_start=t_start, t_stop=t_stop,
                     waveforms=True)
        spiketrain = seg.filter({'electrode_id': el_id},
                                objects=SpikeTrain)[0]
        entries = []
        with open(self.pd + '/%s.txt' % filename) as datafile:
            for i, line in enumerate(datafile):
                # strip a potential UTF-8 BOM left by the exporter
                line = line.strip('\xef\xbb\xbf')
                entries.append(line.split())
        entries = np.asarray(entries, dtype=float)
        # final record excluded, matching the reader's output
        target_data = entries[:-1, 11:]
        timestamps = entries[:-1, 0]
        # reference times are absolute microseconds; shift to session start
        timestamps = (timestamps * pq.microsecond
                      - nio.parameters_global['t_start'])
        np.testing.assert_array_equal(timestamps.magnitude,
                                      spiketrain.magnitude)
        np.testing.assert_array_equal(target_data, spiketrain.waveforms)
def test_read_nev_data(self):
    """Verify events loaded by read_nev against the tab-separated
    'Events.txt' reference dump."""
    t_start, t_stop = 0 * pq.s, 1000 * pq.s
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    filename = 'Events'
    nio.read_nev(filename + '.nev', seg, t_start=t_start, t_stop=t_stop)
    # collect the reference records (one tab-separated event per line)
    ref_times, ref_nttls, ref_names, ref_ids = [], [], [], []
    with open(self.pd + '/%s.txt' % filename) as reference:
        for raw in reference:
            # strip a potential UTF-8 BOM before splitting the columns
            fields = raw.strip('\xef\xbb\xbf').split('\t')
            ref_times.append(int(fields[3]))
            ref_ids.append(int(fields[4]))
            ref_nttls.append(int(fields[5]))
            ref_names.append(fields[10].rstrip('\r\n'))
    # reference times are absolute microseconds; shift to session start
    ref_times = (np.array(ref_times) * pq.microsecond
                 - nio.parameters_global['t_start'])
    # keep only the events inside the requested window
    keep = np.where(ref_times < t_stop)[0]
    if len(keep) == 0:
        return  # nothing to compare
    ref_times = ref_times[keep]
    ref_nttls = np.asarray(ref_nttls)[keep]
    ref_names = np.asarray(ref_names)[keep]
    ref_ids = np.asarray(ref_ids)[keep]
    for idx in range(len(ref_times)):
        candidates = seg.filter({'nttl': ref_nttls[idx]}, objects=Event)
        matches = [ev for ev in candidates
                   if ev.annotations['marker_id'] == ref_ids[idx]
                   and ev.labels == ref_names[idx]]
        self.assertTrue(len(matches) == 1)
        self.assertTrue(ref_times[idx] in matches[0].times)
def test_read_nev_data(self):
    """Cross-check every event returned by read_nev with the
    tab-separated text export of the .nev file."""
    t_start, t_stop = 0 * pq.s, 1000 * pq.s
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    filename = 'Events'
    nio.read_nev(filename + '.nev', seg, t_start=t_start, t_stop=t_stop)
    # one (timestamp, nttl, label, marker_id) tuple per reference line
    records = []
    with open(self.pd + '/%s.txt' % filename) as datafile:
        for line in datafile:
            # drop a potential UTF-8 BOM before splitting the columns
            cols = line.strip('\xef\xbb\xbf').split('\t')
            records.append((int(cols[3]), int(cols[5]),
                            cols[10].rstrip('\r\n'), int(cols[4])))
    # absolute microsecond stamps, shifted to the global session start
    stamps = np.array([r[0] for r in records]) * pq.microsecond
    stamps = stamps - nio.parameters_global['t_start']
    # restrict the comparison to events inside the requested window
    in_window = np.where(stamps < t_stop)[0]
    if len(in_window) == 0:
        return
    stamps = stamps[in_window]
    ttl_values = np.asarray([r[1] for r in records])[in_window]
    labels = np.asarray([r[2] for r in records])[in_window]
    marker_ids = np.asarray([r[3] for r in records])[in_window]
    for stamp, ttl, label, marker in zip(stamps, ttl_values,
                                         labels, marker_ids):
        hits = [e for e in seg.filter({'nttl': ttl}, objects=Event)
                if e.annotations['marker_id'] == marker
                and e.labels == label]
        self.assertTrue(len(hits) == 1)
        self.assertTrue(stamp in hits[0].times)
def test_read_ncs_data(self):
    """Compare the analog signal produced by read_ncs with the
    plain-text reference dump of each .ncs file."""
    t_start, t_stop = 0, 500 * 512  # in samples
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    for el_id, el_dict in nio.parameters_ncs.items():
        recording_path = nio.parameters_ncs[el_id]['recording_file_name']
        # basename without directory (either separator style) or extension
        stem = recording_path.split('/')[-1].split('\\')[-1].split('.')[0]
        nio.read_ncs(stem, seg, t_start=t_start, t_stop=t_stop)
        signal = seg.filter({'electrode_id': el_id},
                            objects=AnalogSignal)[0]
        # 16679 records x 512 samples -- the layout of the reference dump
        expected = np.zeros((16679, 512))
        with open(self.pd + '/%s.txt' % stem) as reference:
            for row, raw in enumerate(reference):
                # strip a potential UTF-8 BOM; samples start at column 4
                fields = raw.strip('\xef\xbb\xbf').split()
                expected[row, :] = np.asarray(fields[4:])
        # flatten to one column and scale ADC counts by ADBitVolts
        expected = expected.reshape((-1, 1)) * el_dict['ADBitVolts']
        np.testing.assert_array_equal(expected[:len(signal)],
                                      signal.magnitude)
def test_read_ncs_data(self):
    """Check the analog signal loaded by read_ncs against the plain-text
    reference dump of each .ncs file.

    Each dump line is one record; the sample values start at column 4.
    The parsed matrix is flattened to a column vector and scaled by the
    electrode's ``ADBitVolts`` before comparison.
    """
    t_start, t_stop = 0, 500 * 512  # in samples
    nio = NeuralynxIO(self.sn, use_cache='never')
    seg = Segment('testsegment')
    # BUGFIX: dict.iteritems() was removed in Python 3 -- items() works
    # on both Python 2 and 3.
    for el_id, el_dict in nio.parameters_ncs.items():
        filepath = nio.parameters_ncs[el_id]['recording_file_name']
        # basename without directory (either separator style) or extension
        filename = filepath.split('/')[-1].split('\\')[-1].split('.')[0]
        nio.read_ncs(filename, seg, t_start=t_start, t_stop=t_stop)
        anasig = seg.filter({'electrode_id': el_id},
                            objects=AnalogSignal)[0]
        # 16679 records x 512 samples -- the layout of the reference dump
        target_data = np.zeros((16679, 512))
        with open(self.pd + '/%s.txt' % filename) as datafile:
            for i, line in enumerate(datafile):
                # strip a potential UTF-8 BOM left by the exporter
                line = line.strip('\xef\xbb\xbf')
                entries = line.split()
                target_data[i, :] = np.asarray(entries[4:])
        # flatten to one column and scale ADC counts by ADBitVolts
        target_data = target_data.reshape((-1, 1)) * el_dict['ADBitVolts']
        np.testing.assert_array_equal(target_data[:len(anasig)],
                                      anasig.magnitude)