def _read_analogsignalarray(self, node, parent): attributes = self._get_standard_attributes(node) # todo: handle channel_index sampling_rate = self._get_quantity(node["sampling_rate"]) t_start = self._get_quantity(node["t_start"]) signal = AnalogSignal(self._get_quantity(node["signal"]), sampling_rate=sampling_rate, t_start=t_start, **attributes) if self._lazy: signal.lazy_shape = node["signal"].shape if len(signal.lazy_shape) == 1: signal.lazy_shape = (signal.lazy_shape[0], 1) signal.segment = parent self.object_refs[node.attrs["object_ref"]] = signal return signal
def read_analogsignal(self,
                      # the 2 first key arguments are imposed by neo.io API
                      lazy=False,
                      cascade=True,
                      channel_index=0,
                      segment_duration=15.,
                      t_start=-1,
                      ):
    """
    With this IO an AnalogSignal can be accessed directly by its
    channel number.  The signal is generated on the fly (a 3 Hz sinus
    plus noise); nothing is read from disk.
    """
    sr = 10000.
    sinus_freq = 3.  # Hz
    # time vector for generated signal:
    tvect = np.arange(t_start, t_start + segment_duration, 1. / sr)

    if lazy:
        anasig = AnalogSignal([], units='V',
                              sampling_rate=sr * pq.Hz,
                              t_start=t_start * pq.s,
                              channel_index=channel_index)
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = tvect.shape
    else:
        # create analogsignal (sinus of 3 Hz)
        # phase offset depends on the channel so channels differ visibly
        sig = np.sin(2 * np.pi * tvect * sinus_freq
                     + channel_index / 5. * 2 * np.pi) + np.random.rand(tvect.size)
        anasig = AnalogSignal(sig, units='V',
                              sampling_rate=sr * pq.Hz,
                              t_start=t_start * pq.s,
                              channel_index=channel_index)

    # for attributes out of neo you can annotate
    anasig.annotate(info='it is a sinus of %f Hz' % sinus_freq)
    return anasig
def _read_analogsignalarray(self, node, parent): attributes = self._get_standard_attributes(node) # todo: handle channel_index sampling_rate = self._get_quantity(node["sampling_rate"]) t_start = self._get_quantity(node["t_start"]) signal = AnalogSignal(self._get_quantity(node["signal"]), sampling_rate=sampling_rate, t_start=t_start, **attributes) if self._lazy: signal.lazy_shape = node["signal"].shape if len(signal.lazy_shape) == 1: signal.lazy_shape = (signal.lazy_shape[0], 1) signal.segment = parent self.object_refs[node.attrs["object_ref"]] = signal return signal
def read_analogsignal(self,
                      # the 2 first key arguments are imposed by neo.io API
                      lazy=False,
                      cascade=True,
                      channel_index=0,
                      segment_duration=15.,
                      t_start=-1,
                      ):
    """
    With this IO an AnalogSignal can be accessed directly by its
    channel number.  A synthetic sinusoid (3 Hz) with additive noise is
    produced; no file access takes place.
    """
    sr = 10000.
    sinus_freq = 3.  # Hz
    # time vector for generated signal:
    tvect = np.arange(t_start, t_start + segment_duration, 1. / sr)

    if lazy:
        anasig = AnalogSignal([], units='V',
                              sampling_rate=sr * pq.Hz,
                              t_start=t_start * pq.s,
                              channel_index=channel_index)
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = tvect.shape
    else:
        # create analogsignal (sinus of 3 Hz); the channel index shifts
        # the phase so every channel gets a distinct trace
        sig = np.sin(2 * np.pi * tvect * sinus_freq
                     + channel_index / 5. * 2 * np.pi) + np.random.rand(tvect.size)
        anasig = AnalogSignal(sig, units='V',
                              sampling_rate=sr * pq.Hz,
                              t_start=t_start * pq.s,
                              channel_index=channel_index)

    # for attributes out of neo you can annotate
    anasig.annotate(info='it is a sinus of %f Hz' % sinus_freq)
    return anasig
def read_analogsignal(self, channel_index=None, lazy=False, cascade=True):
    """
    Read raw traces

    Arguments:
        channel_index: must be integer array
    """
    # Per-channel bit->microvolt scale factors when the recording
    # software stored them; otherwise unit gain in raw "bit" units.
    if self._attrs["app_data"]:
        bit_volts = self._attrs["app_data"]["channel_bit_volts"]
        sig_unit = "uV"
    else:
        bit_volts = np.ones((self._attrs["shape"][1]))  # TODO: find conversion in phy generated files
        sig_unit = "bit"
    if lazy:
        # Lazy mode: empty signal, on-disk sample count in lazy_shape.
        anasig = AnalogSignal(
            [],
            units=sig_unit,
            sampling_rate=self._attrs["kwik"]["sample_rate"] * pq.Hz,
            t_start=self._attrs["kwik"]["start_time"] * pq.s,
        )
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = self._attrs["shape"][0]
    else:
        # FIX: with the default channel_index=None the slice below was
        # data[:, None], which silently inserts an axis and yields a
        # malformed 3-D signal. Fail fast with a clear error instead.
        if channel_index is None:
            raise ValueError("channel_index must be an int or an integer array")
        data = self._kwd["recordings"][str(self._dataset)]["data"].value[:, channel_index]
        data = data * bit_volts[channel_index]
        anasig = AnalogSignal(
            data,
            units=sig_unit,
            sampling_rate=self._attrs["kwik"]["sample_rate"] * pq.Hz,
            t_start=self._attrs["kwik"]["start_time"] * pq.s,
        )
        data = []  # delete from memory
    # for attributes out of neo you can annotate
    anasig.annotate(info="raw traces")
    return anasig
def read_analogsignal(self,
                      # the 2 first key arguments are imposed by neo.io
                      lazy=False,
                      cascade=True,
                      # channel index as given by the neuroshare API
                      channel_index=0,
                      # time in seconds to be read
                      segment_duration=0.,
                      # time in seconds to start reading from
                      t_start=0.,
                      ):
    """Read one neuroshare analog entity as an AnalogSignal.

    segment_duration of 0. (the default) means "read the whole file";
    durations longer than the file are clipped to the file length.
    """
    # some controls:
    # if no segment duration is given, use the complete file
    if segment_duration == 0.:
        segment_duration = float(self.metadata["TimeSpan"])
    # if the segment duration is bigger than file, use the complete file
    if segment_duration >= float(self.metadata["TimeSpan"]):
        segment_duration = float(self.metadata["TimeSpan"])

    if lazy:
        anasig = AnalogSignal([], units="V",
                              sampling_rate=self.metadata["sampRate"] * pq.Hz,
                              t_start=t_start * pq.s,
                              )
        # create a dummie time vector
        tvect = np.arange(t_start, t_start + segment_duration,
                          1. / self.metadata["sampRate"])
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = tvect.shape
    else:
        # get the analog object
        sig = self.fd.get_entity(channel_index)
        # get the units (V, mV etc)
        sigUnits = sig.units
        # get the electrode number (assumes the last 4 chars of the
        # entity label carry it -- TODO confirm against the file format)
        chanName = sig.label[-4:]
        # transform t_start into index (reading will start from this index)
        startat = int(t_start * self.metadata["sampRate"])
        # get the number of bins to read in
        bins = int((segment_duration + t_start) * self.metadata["sampRate"])
        # if the number of bins to read is bigger than
        # the total number of bins, read only till the end of analog object
        if startat + bins > sig.item_count:
            bins = sig.item_count - startat
        # read the data from the sig object
        sig, _, _ = sig.get_data(index=startat, count=bins)
        # store it to the 'AnalogSignal' object
        anasig = AnalogSignal(sig, units=sigUnits,
                              sampling_rate=self.metadata["sampRate"] * pq.Hz,
                              t_start=t_start * pq.s,
                              t_stop=(t_start + segment_duration) * pq.s,
                              channel_index=channel_index)
        # annotate from which electrode the signal comes from
        anasig.annotate(info="signal from channel %s" % chanName)
    return anasig
def read_segment(self, n_start, n_stop, chlist=None, lazy=False, cascade=True): """Reads a Segment from the file and stores in database. The Segment will contain one AnalogSignal for each channel and will go from n_start to n_stop (in samples). Arguments: n_start : time in samples that the Segment begins n_stop : time in samples that the Segment ends Python indexing is used, so n_stop is not inclusive. Returns a Segment object containing the data. """ # If no channel numbers provided, get all of them if chlist is None: chlist = self.loader.get_neural_channel_numbers() # Conversion from bits to full_range units conversion = self.full_range / 2**(8*self.header.sample_width) # Create the Segment seg = Segment(file_origin=self.filename) t_start = float(n_start) / self.header.f_samp t_stop = float(n_stop) / self.header.f_samp seg.annotate(t_start=t_start) seg.annotate(t_stop=t_stop) # Load data from each channel and store for ch in chlist: if lazy: sig = np.array([]) * conversion else: # Get the data from the loader sig = np.array(\ self.loader._get_channel(ch)[n_start:n_stop]) * conversion # Create an AnalogSignal with the data in it anasig = AnalogSignal(signal=sig, sampling_rate=self.header.f_samp*pq.Hz, t_start=t_start*pq.s, file_origin=self.filename, description='Channel %d from %f to %f' % (ch, t_start, t_stop), channel_index=int(ch)) if lazy: anasig.lazy_shape = n_stop-n_start # Link the signal to the segment seg.analogsignals.append(anasig) # Link the signal to the recording channel from which it came #rc = self.channel_number_to_recording_channel[ch] #rc.analogsignals.append(anasig) return seg
def read_segment(self, n_start, n_stop, chlist=None, lazy=False, cascade=True): """Reads a Segment from the file and stores in database. The Segment will contain one AnalogSignal for each channel and will go from n_start to n_stop (in samples). Arguments: n_start : time in samples that the Segment begins n_stop : time in samples that the Segment ends Python indexing is used, so n_stop is not inclusive. Returns a Segment object containing the data. """ # If no channel numbers provided, get all of them if chlist is None: chlist = self.loader.get_neural_channel_numbers() # Conversion from bits to full_range units conversion = self.full_range / 2**(8*self.header.sample_width) # Create the Segment seg = Segment(file_origin=self.filename) t_start = float(n_start) / self.header.f_samp t_stop = float(n_stop) / self.header.f_samp seg.annotate(t_start=t_start) seg.annotate(t_stop=t_stop) # Load data from each channel and store for ch in chlist: if lazy: sig = np.array([]) * conversion else: # Get the data from the loader sig = np.array(\ self.loader._get_channel(ch)[n_start:n_stop]) * conversion # Create an AnalogSignal with the data in it anasig = AnalogSignal(signal=sig, sampling_rate=self.header.f_samp*pq.Hz, t_start=t_start*pq.s, file_origin=self.filename, description='Channel %d from %f to %f' % (ch, t_start, t_stop), channel_index=int(ch)) if lazy: anasig.lazy_shape = n_stop-n_start # Link the signal to the segment seg.analogsignals.append(anasig) # Link the signal to the recording channel from which it came #rc = self.channel_number_to_recording_channel[ch] #rc.analogsignals.append(anasig) return seg
def _create_lazy_analogsignal(self, shape, data, uid, t_start):
    """Return an empty AnalogSignal carrying only metadata.

    The actual samples stay on disk; `lazy_shape` records the full
    shape so callers know what a real load would produce.
    """
    meta = data.attrs
    period = pq.Quantity(meta['dt'], meta['tunit'])
    lazy_sig = AnalogSignal([],
                            dtype=data.dtype,
                            units=meta['unit'],
                            t_start=t_start,
                            sampling_period=period)
    lazy_sig.lazy_shape = shape
    return lazy_sig
def read_block(self, lazy=False, cascade=True):
    """Read the stfio recording as a neo Block.

    One Block section maps to one Segment; each channel of the
    recording becomes one AnalogSignal per Segment.
    """
    if self.filename is not None:
        self.stfio_rec = stfio.read(self.filename)
    bl = Block()
    bl.description = self.stfio_rec.file_description
    bl.annotate(comment=self.stfio_rec.comment)
    try:
        bl.rec_datetime = self.stfio_rec.datetime
    # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception while keeping the best-effort behaviour.
    except Exception:
        bl.rec_datetime = None

    if not cascade:
        return bl

    dt = np.round(self.stfio_rec.dt * 1e-3, 9) * pq.s  # ms to s
    sampling_rate = 1.0 / dt
    t_start = 0 * pq.s

    # iterate over sections first:
    for j, recseg in enumerate(self.stfio_rec[0]):
        seg = Segment(index=j)
        length = len(recseg)

        # iterate over channels:
        for i, recsig in enumerate(self.stfio_rec):
            name = recsig.name
            unit = recsig.yunits
            try:
                pq.Quantity(1, unit)
            # FIX: narrowed from a bare `except:`; fall back to a
            # dimensionless signal when the unit string is unparsable.
            except Exception:
                unit = ''
            if lazy:
                signal = pq.Quantity([], unit)
            else:
                signal = pq.Quantity(recsig[j], unit)
            anaSig = AnalogSignal(signal,
                                  sampling_rate=sampling_rate,
                                  t_start=t_start,
                                  name=str(name),
                                  channel_index=i)
            if lazy:
                anaSig.lazy_shape = length
            seg.analogsignals.append(anaSig)

        bl.segments.append(seg)
        # consecutive sections follow each other in time
        t_start = t_start + length * dt

    bl.create_many_to_one_relationship()
    return bl
def read_block(self, lazy=False, cascade=True):
    """Read the stfio recording as a neo Block (one Segment per
    section, one AnalogSignal per channel in each Segment).
    """
    if self.filename is not None:
        self.stfio_rec = stfio.read(self.filename)
    bl = Block()
    bl.description = self.stfio_rec.file_description
    bl.annotate(comment=self.stfio_rec.comment)
    try:
        bl.rec_datetime = self.stfio_rec.datetime
    # FIX: replaced bare `except:` (which also caught SystemExit and
    # KeyboardInterrupt) with `except Exception:`.
    except Exception:
        bl.rec_datetime = None

    if not cascade:
        return bl

    dt = np.round(self.stfio_rec.dt * 1e-3, 9) * pq.s  # ms to s
    sampling_rate = 1.0 / dt
    t_start = 0 * pq.s

    # iterate over sections first:
    for j, recseg in enumerate(self.stfio_rec[0]):
        seg = Segment(index=j)
        length = len(recseg)

        # iterate over channels:
        for i, recsig in enumerate(self.stfio_rec):
            name = recsig.name
            unit = recsig.yunits
            try:
                pq.Quantity(1, unit)
            # FIX: narrowed from a bare `except:`; keep the fallback to
            # a dimensionless unit for unparsable unit strings.
            except Exception:
                unit = ''
            if lazy:
                signal = pq.Quantity([], unit)
            else:
                signal = pq.Quantity(recsig[j], unit)
            anaSig = AnalogSignal(signal,
                                  sampling_rate=sampling_rate,
                                  t_start=t_start,
                                  name=str(name),
                                  channel_index=i)
            if lazy:
                anaSig.lazy_shape = length
            seg.analogsignals.append(anaSig)

        bl.segments.append(seg)
        t_start = t_start + length * dt

    bl.create_many_to_one_relationship()
    return bl
def read_analogsignal(
        self,
        channel_index=None,
        lazy=False,
        cascade=True,
):
    """
    Read raw traces

    Arguments:
        channel_index: must be integer
    """
    # NOTE(review): on a bad channel_index this only prints and falls
    # through with the value unchanged -- the h5py slicing below then
    # fails with a less helpful error. Consider re-raising.
    try:
        channel_index = int(channel_index)
    except TypeError:
        print('channel_index must be int, not %s' % type(channel_index))

    # Per-channel bit->microvolt factors when available, else raw bits.
    if self._attrs['app_data']:
        bit_volts = self._attrs['app_data']['channel_bit_volts']
        sig_unit = 'uV'
    else:
        bit_volts = np.ones(
            (self._attrs['shape'][1]))  # TODO: find conversion in phy generated files
        sig_unit = 'bit'
    if lazy:
        anasig = AnalogSignal(
            [],
            units=sig_unit,
            sampling_rate=self._attrs['kwik']['sample_rate'] * pq.Hz,
            t_start=self._attrs['kwik']['start_time'] * pq.s,
            channel_index=channel_index,
        )
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = self._attrs['shape'][0]
    else:
        data = self._kwd['recordings'][str(
            self._dataset)]['data'].value[:, channel_index]
        data = data * bit_volts[channel_index]
        anasig = AnalogSignal(
            data,
            units=sig_unit,
            sampling_rate=self._attrs['kwik']['sample_rate'] * pq.Hz,
            t_start=self._attrs['kwik']['start_time'] * pq.s,
            channel_index=channel_index,
        )
        data = []  # delete from memory
    # for attributes out of neo you can annotate
    anasig.annotate(info='raw traces')
    return anasig
def read_analogsignal(self, lazy=False, cascade=True):
    """Read the Igor binary wave in self.filename as one AnalogSignal.

    Raises IOError for file format versions above 3 and Exception when
    the `igor` package is missing.
    """
    if not HAVE_IGOR:
        raise Exception(
            "igor package not installed. Try `pip install igor`")
    data = bw.load(self.filename)
    version = data['version']
    if version > 3:
        raise IOError(
            "Igor binary wave file format version {0} is not supported.".
            format(version))
    content = data['wave']
    if "padding" in content:
        assert content[
            'padding'].size == 0, "Cannot handle non-empty padding"
    if lazy:
        # not really lazy, since the `igor` module loads the data anyway
        signal = np.array((), dtype=content['wData'].dtype)
    else:
        signal = content['wData']
    note = content['note']
    header = content['wave_header']
    name = header['bname']
    # full-scale fields are expected to be unset in supported files
    assert header['botFullScale'] == 0
    assert header['topFullScale'] == 0
    units = "".join(header['dataUnits'])
    # default to seconds when the wave has no x-axis unit
    time_units = "".join(header['xUnits']) or "s"
    t_start = pq.Quantity(header['hsB'], time_units)
    sampling_period = pq.Quantity(header['hsA'], time_units)
    # the note field may carry user metadata; fall back to the raw text
    if self.parse_notes:
        try:
            annotations = self.parse_notes(note)
        except ValueError:
            warn("Couldn't parse notes field.")
            annotations = {'note': note}
    else:
        annotations = {'note': note}

    signal = AnalogSignal(signal, units=units, copy=False, t_start=t_start,
                          sampling_period=sampling_period, name=name,
                          file_origin=self.filename, **annotations)
    if lazy:
        signal.lazy_shape = content['wData'].shape
    return signal
def read_analogsignal(self,
                      channel_index=None,
                      lazy=False,
                      cascade=True,
                      ):
    """
    Read raw traces

    Arguments:
        channel_index: must be integer
    """
    # NOTE(review): failure here only prints and continues with the
    # original value; the slicing below will then raise. Consider
    # raising directly instead of printing.
    try:
        channel_index = int(channel_index)
    except TypeError:
        print('channel_index must be int, not %s' % type(channel_index))

    # Per-channel scale factors when present, else raw "bit" units.
    if self._attrs['app_data']:
        bit_volts = self._attrs['app_data']['channel_bit_volts']
        sig_unit = 'uV'
    else:
        bit_volts = np.ones((self._attrs['shape'][1]))  # TODO: find conversion in phy generated files
        sig_unit = 'bit'
    if lazy:
        anasig = AnalogSignal([],
                              units=sig_unit,
                              sampling_rate=self._attrs['kwik']['sample_rate'] * pq.Hz,
                              t_start=self._attrs['kwik']['start_time'] * pq.s,
                              channel_index=channel_index,
                              )
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = self._attrs['shape'][0]
    else:
        data = self._kwd['recordings'][str(self._dataset)]['data'].value[:, channel_index]
        data = data * bit_volts[channel_index]
        anasig = AnalogSignal(data,
                              units=sig_unit,
                              sampling_rate=self._attrs['kwik']['sample_rate'] * pq.Hz,
                              t_start=self._attrs['kwik']['start_time'] * pq.s,
                              channel_index=channel_index,
                              )
        data = []  # delete from memory
    # for attributes out of neo you can annotate
    anasig.annotate(info='raw traces')
    return anasig
def _extract_signals(self, data, metadata, lazy):
    """Build one multi-channel AnalogSignal from `data`.

    Channels first_index..last_index (inclusive, from `metadata`) are
    stacked column-wise. Returns None when there is nothing to extract.
    """
    signal = None
    if lazy and data.size > 0:
        signal = AnalogSignal([],
                              units=self._determine_units(metadata),
                              sampling_period=metadata['dt'] * pq.ms)
        signal.lazy_shape = None
    else:
        # FIX: np.vstack needs a sequence of arrays; passing a bare
        # generator raises (or builds a 0-d object array) on modern
        # NumPy, so materialise the per-channel rows as a list.
        arr = numpy.vstack([self._extract_array(data, channel_index)
                            for channel_index in range(metadata['first_index'],
                                                       metadata['last_index'] + 1)])
        if len(arr) > 0:
            # transpose: one column per channel
            signal = AnalogSignal(arr.T,
                                  units=self._determine_units(metadata),
                                  sampling_period=metadata['dt'] * pq.ms)
    if signal is not None:
        signal.annotate(label=metadata["label"],
                        variable=metadata["variable"])
    return signal
def _extract_signal(self, data, metadata, channel_index, lazy):
    """Pull one channel's trace out of `data` as an AnalogSignal.

    Returns None when the channel is absent (lazy) or empty (eager).
    """
    sig = None
    if lazy:
        # only advertise a lazy placeholder if the channel exists
        if channel_index in data[:, 1]:
            sig = AnalogSignal([],
                               units=self._determine_units(metadata),
                               sampling_period=metadata['dt'] * pq.ms,
                               channel_index=channel_index)
            sig.lazy_shape = None
    else:
        values = self._extract_array(data, channel_index)
        if len(values) > 0:
            sig = AnalogSignal(values,
                               units=self._determine_units(metadata),
                               sampling_period=metadata['dt'] * pq.ms,
                               channel_index=channel_index)
    if sig is None:
        return None
    sig.annotate(label=metadata["label"],
                 variable=metadata["variable"])
    return sig
def _extract_signals(self, data, metadata, lazy):
    """Stack channels first_index..last_index from `data` into a single
    multi-channel AnalogSignal (columns = channels), or None if empty.
    """
    signal = None
    if lazy and data.size > 0:
        signal = AnalogSignal([],
                              units=self._determine_units(metadata),
                              sampling_period=metadata['dt'] * pq.ms)
        signal.lazy_shape = None
    else:
        # FIX: np.vstack requires a sequence; a generator expression is
        # rejected by modern NumPy, so build an explicit list first.
        arr = numpy.vstack(
            [self._extract_array(data, channel_index)
             for channel_index in range(metadata['first_index'],
                                        metadata['last_index'] + 1)])
        if len(arr) > 0:
            signal = AnalogSignal(arr.T,
                                  units=self._determine_units(metadata),
                                  sampling_period=metadata['dt'] * pq.ms)
    if signal is not None:
        signal.annotate(label=metadata["label"],
                        variable=metadata["variable"])
    return signal
def _extract_signal(self, data, metadata, channel_index, lazy):
    """Extract a single channel from `data` as an AnalogSignal.

    None is returned for a missing (lazy) or empty (eager) channel.
    """
    result = None
    if lazy:
        # a lazy placeholder is created only when the channel is present
        if channel_index in data[:, 1]:
            result = AnalogSignal([],
                                  units=self._determine_units(metadata),
                                  sampling_period=metadata['dt'] * pq.ms,
                                  channel_index=channel_index)
            result.lazy_shape = None
    else:
        trace = self._extract_array(data, channel_index)
        if len(trace) > 0:
            result = AnalogSignal(trace,
                                  units=self._determine_units(metadata),
                                  sampling_period=metadata['dt'] * pq.ms,
                                  channel_index=channel_index)
    if result is None:
        return None
    result.annotate(label=metadata["label"],
                    variable=metadata["variable"])
    return result
def read_analogsignal(self, lazy=False, cascade=True):
    """Load the Igor binary wave in self.filename as an AnalogSignal.

    Only format versions <= 3 are supported; the `igor` package must be
    installed.
    """
    if not HAVE_IGOR:
        raise Exception("igor package not installed. Try `pip install igor`")
    data = bw.load(self.filename)
    version = data['version']
    if version > 3:
        raise IOError("Igor binary wave file format version {0} is not supported.".format(version))
    content = data['wave']
    if "padding" in content:
        assert content['padding'].size == 0, "Cannot handle non-empty padding"
    if lazy:
        # not really lazy, since the `igor` module loads the data anyway
        signal = np.array((), dtype=content['wData'].dtype)
    else:
        signal = content['wData']
    note = content['note']
    header = content['wave_header']
    name = header['bname']
    # supported files are expected to leave the full-scale fields unset
    assert header['botFullScale'] == 0
    assert header['topFullScale'] == 0
    units = "".join(header['dataUnits'])
    # seconds by default when no x-axis unit is recorded
    time_units = "".join(header['xUnits']) or "s"
    t_start = pq.Quantity(header['hsB'], time_units)
    sampling_period = pq.Quantity(header['hsA'], time_units)
    # try the user-supplied note parser, falling back to the raw note
    if self.parse_notes:
        try:
            annotations = self.parse_notes(note)
        except ValueError:
            warn("Couldn't parse notes field.")
            annotations = {'note': note}
    else:
        annotations = {'note': note}

    signal = AnalogSignal(signal, units=units, copy=False, t_start=t_start,
                          sampling_period=sampling_period, name=name,
                          file_origin=self.filename, **annotations)
    if lazy:
        signal.lazy_shape = content['wData'].shape
    return signal
def read_analogsignal(self, channel_index=None, lazy=False, cascade=True):
    """
    Read raw traces

    Arguments:
        channel_index: must be integer
    """
    # NOTE(review): a failing conversion only prints and falls through
    # with channel_index unchanged; the lookups below then raise a less
    # helpful error. Consider re-raising.
    try:
        channel_index = int(channel_index)
    except TypeError:
        print('channel_index must be int, not %s' % type(channel_index))

    # assumes 'app_data' is always present for this format -- TODO
    # confirm (the sibling kwik reader guards this with a check)
    bit_volts = self._attrs['app_data']['channel_bit_volts']
    sig_unit = 'uV'
    if lazy:
        anasig = AnalogSignal(
            [],
            units=sig_unit,
            sampling_rate=self._attrs['kwe']['sample_rate'] * pq.Hz,
            t_start=self._attrs['kwe']['start_time'] * pq.s,
            channel_index=channel_index,
            name=self._nodes['Rhythm FPGA'][0]['chanNames'][channel_index])
        # we add the attribute lazy_shape with the size if loaded
        anasig.lazy_shape = self._attrs['shape'][0]
    else:
        data = self._kwd['recordings'][str(
            self._dataset)]['data'].value[:, channel_index]
        data = data * bit_volts[channel_index]
        anasig = AnalogSignal(
            data,
            units=sig_unit,
            sampling_rate=self._attrs['kwe']['sample_rate'] * pq.Hz,
            t_start=self._attrs['kwe']['start_time'] * pq.s,
            channel_index=channel_index,
            name=self._nodes['Rhythm FPGA'][0]['chanNames'][channel_index])
        data = []  # delete from memory
    # for attributes out of neo you can annotate
    anasig.annotate(info='raw trace')
    return anasig
def read_segment(self, lazy=False, cascade=True, load_spike_waveform=True):
    """
    Read in a segment.

    Arguments:
        load_spike_waveform : load or not waveform of spikes (default True)

    The file is scanned twice: a first pass counts samples, spikes and
    events so arrays can be pre-allocated, a second pass (skipped when
    lazy) fills them. FIX: the py2-only `dict.iteritems()` method calls
    are replaced with the `iteritems(d)` helper function already used
    elsewhere in this method, and the file handle is now closed.
    """
    fid = open(self.filename, 'rb')
    globalHeader = HeaderReader(fid, GlobalHeader).read_f(offset=0)

    # metadatas
    seg = Segment()
    seg.rec_datetime = datetime.datetime(
        globalHeader.pop('Year'),
        globalHeader.pop('Month'),
        globalHeader.pop('Day'),
        globalHeader.pop('Hour'),
        globalHeader.pop('Minute'),
        globalHeader.pop('Second')
    )
    seg.file_origin = os.path.basename(self.filename)

    # FIX: function form works on both py2 and py3
    for key, val in iteritems(globalHeader):
        seg.annotate(**{key: val})

    if not cascade:
        fid.close()
        return seg

    ## Step 1 : read headers
    # dsp channels header = spikes and waveforms
    dspChannelHeaders = {}
    maxunit = 0
    maxchan = 0
    for _ in range(globalHeader['NumDSPChannels']):
        # channel is 1 based
        channelHeader = HeaderReader(fid, ChannelHeader).read_f(offset=None)
        channelHeader['Template'] = np.array(channelHeader['Template']).reshape((5, 64))
        channelHeader['Boxes'] = np.array(channelHeader['Boxes']).reshape((5, 2, 4))
        dspChannelHeaders[channelHeader['Channel']] = channelHeader
        maxunit = max(channelHeader['NUnits'], maxunit)
        maxchan = max(channelHeader['Channel'], maxchan)

    # event channel header
    eventHeaders = {}
    for _ in range(globalHeader['NumEventChannels']):
        eventHeader = HeaderReader(fid, EventHeader).read_f(offset=None)
        eventHeaders[eventHeader['Channel']] = eventHeader

    # slow channel header = signal
    slowChannelHeaders = {}
    for _ in range(globalHeader['NumSlowChannels']):
        slowChannelHeader = HeaderReader(fid, SlowChannelHeader).read_f(offset=None)
        slowChannelHeaders[slowChannelHeader['Channel']] = slowChannelHeader

    ## Step 2 : a first loop for counting size
    # signal
    nb_samples = np.zeros(len(slowChannelHeaders))
    sample_positions = np.zeros(len(slowChannelHeaders))
    t_starts = np.zeros(len(slowChannelHeaders), dtype='f')

    # spiketimes and waveform
    nb_spikes = np.zeros((maxchan + 1, maxunit + 1), dtype='i')
    wf_sizes = np.zeros((maxchan + 1, maxunit + 1, 2), dtype='i')

    # eventarrays
    nb_events = {}
    #maxstrsizeperchannel = { }
    for chan, h in iteritems(eventHeaders):
        nb_events[chan] = 0
        #maxstrsizeperchannel[chan] = 0

    start = fid.tell()
    while fid.tell() != -1:
        # read block header
        dataBlockHeader = HeaderReader(fid, DataBlockHeader).read_f(offset=None)
        if dataBlockHeader is None:
            break
        chan = dataBlockHeader['Channel']
        unit = dataBlockHeader['Unit']
        n1, n2 = dataBlockHeader['NumberOfWaveforms'], dataBlockHeader['NumberOfWordsInWaveform']
        # 5-byte timestamp in ticks (converted to seconds in pass 2)
        time = (dataBlockHeader['UpperByteOf5ByteTimestamp'] * 2. ** 32 +
                dataBlockHeader['TimeStamp'])

        if dataBlockHeader['Type'] == 1:
            nb_spikes[chan, unit] += 1
            wf_sizes[chan, unit, :] = [n1, n2]
            fid.seek(n1 * n2 * 2, 1)
        elif dataBlockHeader['Type'] == 4:
            # event
            nb_events[chan] += 1
        elif dataBlockHeader['Type'] == 5:
            # continuous signal
            fid.seek(n2 * 2, 1)
            if n2 > 0:
                nb_samples[chan] += n2
            # NOTE(review): this records t_start only while the counter
            # is still zero, i.e. before the first non-empty block --
            # preserved as-is from the original ordering.
            if nb_samples[chan] == 0:
                t_starts[chan] = time

    ## Step 3: allocating memory and 2 loop for reading if not lazy
    if not lazy:
        # allocating mem for signal
        sigarrays = {}
        for chan, h in iteritems(slowChannelHeaders):
            sigarrays[chan] = np.zeros(nb_samples[chan])

        # allocating mem for SpikeTrain
        stimearrays = np.zeros((maxchan + 1, maxunit + 1), dtype=object)
        swfarrays = np.zeros((maxchan + 1, maxunit + 1), dtype=object)
        for (chan, unit), _ in np.ndenumerate(nb_spikes):
            stimearrays[chan, unit] = np.zeros(nb_spikes[chan, unit], dtype='f')
            if load_spike_waveform:
                n1, n2 = wf_sizes[chan, unit, :]
                swfarrays[chan, unit] = np.zeros((nb_spikes[chan, unit], n1, n2), dtype='f4')
        pos_spikes = np.zeros(nb_spikes.shape, dtype='i')

        # allocating mem for event
        eventpositions = {}
        evarrays = {}
        for chan, nb in iteritems(nb_events):
            evarrays[chan] = {
                'times': np.zeros(nb, dtype='f'),
                'labels': np.zeros(nb, dtype='S4')
            }
            eventpositions[chan] = 0

        fid.seek(start)
        while fid.tell() != -1:
            dataBlockHeader = HeaderReader(fid, DataBlockHeader).read_f(offset=None)
            if dataBlockHeader is None:
                break
            chan = dataBlockHeader['Channel']
            n1, n2 = dataBlockHeader['NumberOfWaveforms'], dataBlockHeader['NumberOfWordsInWaveform']
            time = dataBlockHeader['UpperByteOf5ByteTimestamp'] * 2. ** 32 + dataBlockHeader['TimeStamp']
            # ticks -> seconds
            time /= globalHeader['ADFrequency']
            if n2 < 0:
                break

            if dataBlockHeader['Type'] == 1:
                # spike
                unit = dataBlockHeader['Unit']
                pos = pos_spikes[chan, unit]
                stimearrays[chan, unit][pos] = time
                if load_spike_waveform and n1 * n2 != 0:
                    swfarrays[chan, unit][pos, :, :] = np.fromstring(
                        fid.read(n1 * n2 * 2), dtype='i2').reshape(n1, n2).astype('f4')
                else:
                    fid.seek(n1 * n2 * 2, 1)
                pos_spikes[chan, unit] += 1
            elif dataBlockHeader['Type'] == 4:
                # event
                pos = eventpositions[chan]
                evarrays[chan]['times'][pos] = time
                evarrays[chan]['labels'][pos] = dataBlockHeader['Unit']
                eventpositions[chan] += 1
            elif dataBlockHeader['Type'] == 5:
                # signal
                data = np.fromstring(fid.read(n2 * 2), dtype='i2').astype('f4')
                sigarrays[chan][sample_positions[chan]:sample_positions[chan] + data.size] = data
                sample_positions[chan] += data.size

    ## Step 4: create neo object
    for chan, h in iteritems(eventHeaders):
        if lazy:
            times = []
            labels = None
        else:
            times = evarrays[chan]['times']
            labels = evarrays[chan]['labels']
        ea = EventArray(
            times * pq.s,
            labels=labels,
            channel_name=eventHeaders[chan]['Name'],
            channel_index=chan
        )
        if lazy:
            ea.lazy_shape = nb_events[chan]
        seg.eventarrays.append(ea)

    for chan, h in iteritems(slowChannelHeaders):
        if lazy:
            signal = []
        else:
            # gain formula depends on the file version
            if globalHeader['Version'] == 100 or globalHeader['Version'] == 101:
                gain = 5000. / (2048 * slowChannelHeaders[chan]['Gain'] * 1000.)
            elif globalHeader['Version'] == 102:
                gain = 5000. / (2048 * slowChannelHeaders[chan]['Gain'] *
                                slowChannelHeaders[chan]['PreampGain'])
            elif globalHeader['Version'] >= 103:
                gain = globalHeader['SlowMaxMagnitudeMV'] / (
                    .5 * (2 ** globalHeader['BitsPerSpikeSample']) *
                    slowChannelHeaders[chan]['Gain'] *
                    slowChannelHeaders[chan]['PreampGain'])
            signal = sigarrays[chan] * gain
        anasig = AnalogSignal(
            signal * pq.V,
            sampling_rate=float(slowChannelHeaders[chan]['ADFreq']) * pq.Hz,
            t_start=t_starts[chan] * pq.s,
            channel_index=slowChannelHeaders[chan]['Channel'],
            channel_name=slowChannelHeaders[chan]['Name'],
        )
        if lazy:
            anasig.lazy_shape = nb_samples[chan]
        seg.analogsignals.append(anasig)

    for (chan, unit), value in np.ndenumerate(nb_spikes):
        if nb_spikes[chan, unit] == 0:
            continue
        if lazy:
            times = []
            waveforms = None
            t_stop = 0
        else:
            times = stimearrays[chan, unit]
            t_stop = times.max()
            if load_spike_waveform:
                # waveform gain formula also depends on the file version
                if globalHeader['Version'] < 103:
                    gain = 3000. / (2048 * dspChannelHeaders[chan]['Gain'] * 1000.)
                elif globalHeader['Version'] >= 103 and globalHeader['Version'] < 105:
                    gain = globalHeader['SpikeMaxMagnitudeMV'] / (
                        .5 * 2. ** (globalHeader['BitsPerSpikeSample']) * 1000.)
                elif globalHeader['Version'] > 105:
                    gain = globalHeader['SpikeMaxMagnitudeMV'] / (
                        .5 * 2. ** (globalHeader['BitsPerSpikeSample']) *
                        globalHeader['SpikePreAmpGain'])
                waveforms = swfarrays[chan, unit] * gain * pq.V
            else:
                waveforms = None
        sptr = SpikeTrain(
            times,
            units='s',
            t_stop=t_stop * pq.s,
            waveforms=waveforms
        )
        sptr.annotate(unit_name=dspChannelHeaders[chan]['Name'])
        sptr.annotate(channel_index=chan)
        # FIX: function form works on both py2 and py3
        for key, val in iteritems(dspChannelHeaders[chan]):
            sptr.annotate(**{key: val})
        if lazy:
            sptr.lazy_shape = nb_spikes[chan, unit]
        seg.spiketrains.append(sptr)

    fid.close()
    seg.create_many_to_one_relationship()
    return seg
def read_block(self, lazy=False, cascade=True):
    """Read an Axon ABF file as a neo Block.

    One sweep/episode becomes one Segment; each ADC channel of a sweep
    becomes one AnalogSignal. Tag comments (gap-free modes) are attached
    to the first Segment as an Event.
    FIX: the bare `except:` guarding the unit check now catches only
    Exception, so SystemExit/KeyboardInterrupt propagate.
    """
    header = self.read_header()
    version = header['fFileVersionNumber']

    bl = Block()
    bl.file_origin = os.path.basename(self.filename)
    bl.annotate(abf_version=str(version))

    # date and time
    if version < 2.:
        YY = 1900
        MM = 1
        DD = 1
        hh = int(header['lFileStartTime'] / 3600.)
        mm = int((header['lFileStartTime'] - hh * 3600) / 60)
        ss = header['lFileStartTime'] - hh * 3600 - mm * 60
        ms = int(np.mod(ss, 1) * 1e6)
        ss = int(ss)
    elif version >= 2.:
        YY = int(header['uFileStartDate'] / 10000)
        MM = int((header['uFileStartDate'] - YY * 10000) / 100)
        DD = int(header['uFileStartDate'] - YY * 10000 - MM * 100)
        hh = int(header['uFileStartTimeMS'] / 1000. / 3600.)
        mm = int((header['uFileStartTimeMS'] / 1000. - hh * 3600) / 60)
        ss = header['uFileStartTimeMS'] / 1000. - hh * 3600 - mm * 60
        ms = int(np.mod(ss, 1) * 1e6)
        ss = int(ss)
    bl.rec_datetime = datetime.datetime(YY, MM, DD, hh, mm, ss, ms)

    if not cascade:
        return bl

    # file format
    if header['nDataFormat'] == 0:
        dt = np.dtype('i2')
    elif header['nDataFormat'] == 1:
        dt = np.dtype('f4')

    if version < 2.:
        nbchannel = header['nADCNumChannels']
        head_offset = header['lDataSectionPtr'] * BLOCKSIZE + header[
            'nNumPointsIgnored'] * dt.itemsize
        totalsize = header['lActualAcqLength']
    elif version >= 2.:
        nbchannel = header['sections']['ADCSection']['llNumEntries']
        head_offset = header['sections']['DataSection'][
            'uBlockIndex'] * BLOCKSIZE
        totalsize = header['sections']['DataSection']['llNumEntries']

    data = np.memmap(self.filename, dt, 'r',
                     shape=(totalsize,), offset=head_offset)

    # 3 possible modes
    if version < 2.:
        mode = header['nOperationMode']
    elif version >= 2.:
        mode = header['protocol']['nOperationMode']

    if (mode == 1) or (mode == 2) or (mode == 5) or (mode == 3):
        # event-driven variable-length mode (mode 1)
        # event-driven fixed-length mode (mode 2 or 5)
        # gap free mode (mode 3) can be in several episodes

        # read sweep pos
        if version < 2.:
            nbepisod = header['lSynchArraySize']
            offset_episode = header['lSynchArrayPtr'] * BLOCKSIZE
        elif version >= 2.:
            nbepisod = header['sections']['SynchArraySection'][
                'llNumEntries']
            offset_episode = header['sections']['SynchArraySection'][
                'uBlockIndex'] * BLOCKSIZE
        if nbepisod > 0:
            episode_array = np.memmap(
                self.filename, [('offset', 'i4'), ('len', 'i4')], 'r',
                shape=nbepisod, offset=offset_episode)
        else:
            # no synch array: treat the whole recording as one episode
            episode_array = np.empty(1, [('offset', 'i4'), ('len', 'i4')])
            episode_array[0]['len'] = data.size
            episode_array[0]['offset'] = 0

        # sampling_rate
        if version < 2.:
            sampling_rate = 1. / (header['fADCSampleInterval'] *
                                  nbchannel * 1.e-6) * pq.Hz
        elif version >= 2.:
            sampling_rate = 1.e6 / \
                header['protocol']['fADCSequenceInterval'] * pq.Hz

        # construct block
        # one sweep = one segment in a block
        pos = 0
        for j in range(episode_array.size):
            seg = Segment(index=j)

            length = episode_array[j]['len']

            if version < 2.:
                fSynchTimeUnit = header['fSynchTimeUnit']
            elif version >= 2.:
                fSynchTimeUnit = header['protocol']['fSynchTimeUnit']

            if (fSynchTimeUnit != 0) and (mode == 1):
                length /= fSynchTimeUnit

            if not lazy:
                subdata = data[pos:pos + length]
                subdata = subdata.reshape((int(subdata.size / nbchannel),
                                           nbchannel)).astype('f')
                if dt == np.dtype('i2'):
                    if version < 2.:
                        reformat_integer_v1(subdata, nbchannel, header)
                    elif version >= 2.:
                        reformat_integer_v2(subdata, nbchannel, header)

            pos += length

            if version < 2.:
                chans = [chan_num for chan_num in header['nADCSamplingSeq']
                         if chan_num >= 0]
            else:
                chans = range(nbchannel)
            for n, i in enumerate(chans[:nbchannel]):  # fix SamplingSeq
                if version < 2.:
                    name = header['sADCChannelName'][i].replace(b' ', b'')
                    unit = header['sADCUnits'][i].replace(b'\xb5', b'u').\
                        replace(b' ', b'').decode('utf-8')  # \xb5 is µ
                    num = header['nADCPtoLChannelMap'][i]
                elif version >= 2.:
                    lADCIi = header['listADCInfo'][i]
                    name = lADCIi['ADCChNames'].replace(b' ', b'')
                    unit = lADCIi['ADCChUnits'].replace(b'\xb5', b'u').\
                        replace(b' ', b'').decode('utf-8')
                    num = header['listADCInfo'][i]['nADCNum']
                if (fSynchTimeUnit == 0):
                    t_start = float(episode_array[j]['offset']) / sampling_rate
                else:
                    t_start = float(episode_array[j]['offset']) * fSynchTimeUnit * 1e-6 * pq.s
                t_start = t_start.rescale('s')
                try:
                    pq.Quantity(1, unit)
                # FIX: narrowed from a bare `except:`; unparsable unit
                # strings fall back to dimensionless as before.
                except Exception:
                    unit = ''
                if lazy:
                    signal = [] * pq.Quantity(1, unit)
                else:
                    signal = pq.Quantity(subdata[:, n], unit)
                anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                                      t_start=t_start,
                                      name=str(name),
                                      channel_index=int(num))
                if lazy:
                    anaSig.lazy_shape = length / nbchannel
                seg.analogsignals.append(anaSig)
            bl.segments.append(seg)

        if mode in [3, 5]:  # TODO check if tags exits in other mode
            # tag is EventArray that should be attached to Block
            # It is attched to the first Segment
            times = []
            labels = []
            comments = []
            for i, tag in enumerate(header['listTag']):
                times.append(tag['lTagTime'] / sampling_rate)
                labels.append(str(tag['nTagType']))
                comments.append(clean_string(tag['sComment']))
            times = np.array(times)
            labels = np.array(labels, dtype='S')
            comments = np.array(comments, dtype='S')
            # attach all tags to the first segment.
            seg = bl.segments[0]
            if lazy:
                ea = Event(times=[] * pq.s, labels=np.array([], dtype='S'))
                ea.lazy_shape = len(times)
            else:
                ea = Event(times=times * pq.s, labels=labels,
                           comments=comments)
            seg.events.append(ea)

    bl.create_many_to_one_relationship()
    return bl
def read_segment(self, cascade=True, lazy=False, ):
    """Read a Micromed .TRC file into a single neo Segment.

    Parses the fixed-offset binary header (patient name, recording date,
    data layout), the zone directory, then builds one AnalogSignal per
    channel plus trigger/note EventArrays and event EpochArrays.

    :param cascade: if False, return the bare annotated Segment only.
    :param lazy: if True, skip loading sample data; objects get a
        ``lazy_shape`` attribute instead.

    Fixes vs previous revision:
      * ``rawdata.size / Num_Chan`` -> ``//`` (reshape needs an int;
        true division yields a float on Python 3).
      * epoch lazy_shape used ``triggers.size`` (leftover variable from
        the trigger loop) instead of ``epochs.size``.
    """
    f = struct_file(self.filename, 'rb')

    # Name (fixed offsets; NOTE(review): assumes struct_file.read returns
    # str-compatible data for the ' ' comparison -- confirm on Python 3)
    f.seek(64, 0)
    surname = f.read(22)
    while surname[-1] == ' ':
        if len(surname) == 0:
            break
        surname = surname[:-1]
    firstname = f.read(20)
    while firstname[-1] == ' ':
        if len(firstname) == 0:
            break
        firstname = firstname[:-1]

    # Date (year stored as offset from 1900)
    f.seek(128, 0)
    day, month, year, hour, minute, sec = f.read_f('bbbbbb')
    rec_datetime = datetime.datetime(year + 1900, month, day, hour,
                                     minute, sec)

    f.seek(138, 0)
    Data_Start_Offset, Num_Chan, Multiplexer, Rate_Min, Bytes = f.read_f(
        'IHHHH')
    #~ print Num_Chan, Bytes

    # header version -- only version 4 is supported
    f.seek(175, 0)
    header_version, = f.read_f('b')
    assert header_version == 4

    seg = Segment(name=firstname + ' ' + surname,
                  file_origin=os.path.basename(self.filename), )
    seg.annotate(surname=surname)
    seg.annotate(firstname=firstname)
    seg.annotate(rec_datetime=rec_datetime)

    if not cascade:
        return seg

    # area: directory of named zones -> (name, file offset, byte length)
    f.seek(176, 0)
    zone_names = ['ORDER', 'LABCOD', 'NOTE', 'FLAGS', 'TRONCA', 'IMPED_B',
                  'IMPED_E', 'MONTAGE', 'COMPRESS', 'AVERAGE', 'HISTORY',
                  'DVIDEO', 'EVENT A', 'EVENT B', 'TRIGGER']
    zones = {}
    for zname in zone_names:
        zname2, pos, length = f.read_f('8sII')
        zones[zname] = zname2, pos, length
        #~ print zname2, pos, length

    # reading raw data (multiplexed samples, Bytes per sample)
    if not lazy:
        f.seek(Data_Start_Offset, 0)
        rawdata = np.fromstring(f.read(), dtype='u' + str(Bytes))
        # floor division: reshape requires integer dimensions
        rawdata = rawdata.reshape((rawdata.size // Num_Chan, Num_Chan))

    # Reading Code Info: maps channel slot -> entry in LABCOD zone
    zname2, pos, length = zones['ORDER']
    f.seek(pos, 0)
    code = np.fromfile(f, dtype='u2', count=Num_Chan)

    units = {-1: pq.nano * pq.V, 0: pq.uV, 1: pq.mV, 2: 1, 100: pq.percent,
             101: pq.dimensionless, 102: pq.dimensionless}

    for c in range(Num_Chan):
        zname2, pos, length = zones['LABCOD']
        f.seek(pos + code[c] * 128 + 2, 0)

        label = f.read(6).strip("\x00")
        ground = f.read(6).strip("\x00")
        (logical_min, logical_max, logical_ground, physical_min,
         physical_max) = f.read_f('iiiii')
        k, = f.read_f('h')
        # unknown unit codes fall back to microvolts
        if k in units.keys():
            unit = units[k]
        else:
            unit = pq.uV

        f.seek(8, 1)
        sampling_rate, = f.read_f('H') * pq.Hz
        sampling_rate *= Rate_Min

        if lazy:
            signal = [] * unit
        else:
            # linear mapping from logical (stored) to physical values
            factor = float(physical_max - physical_min) / float(
                logical_max - logical_min + 1)
            signal = (rawdata[:, c].astype('f') - logical_ground) * \
                factor * unit

        anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                              name=label, channel_index=c)
        if lazy:
            anaSig.lazy_shape = None
        anaSig.annotate(ground=ground)
        seg.analogsignals.append(anaSig)

    sampling_rate = np.mean(
        [anaSig.sampling_rate for anaSig in seg.analogsignals]) * pq.Hz

    # Read trigger and notes
    for zname, label_dtype in [('TRIGGER', 'u2'), ('NOTE', 'S40')]:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        triggers = np.fromstring(
            f.read(length),
            dtype=[('pos', 'u4'), ('label', label_dtype)], )
        ea = EventArray(name=zname[0] + zname[1:].lower())
        if not lazy:
            # keep in-range, non-zero, monotonically plausible entries
            keep = (triggers['pos'] >= triggers['pos'][0]) & (
                triggers['pos'] < rawdata.shape[0]) & (triggers['pos'] != 0)
            triggers = triggers[keep]
            ea.labels = triggers['label'].astype('S')
            ea.times = (triggers['pos'] / sampling_rate).rescale('s')
        else:
            ea.lazy_shape = triggers.size
        seg.eventarrays.append(ea)

    # Read Event A and B
    # Not so well tested
    for zname in ['EVENT A', 'EVENT B']:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        epochs = np.fromstring(
            f.read(length),
            dtype=[('label', 'u4'), ('start', 'u4'), ('stop', 'u4'), ])
        ep = EpochArray(name=zname[0] + zname[1:].lower())
        if not lazy:
            keep = (epochs['start'] > 0) & (
                epochs['start'] < rawdata.shape[0]) & (
                epochs['stop'] < rawdata.shape[0])
            epochs = epochs[keep]
            ep.labels = epochs['label'].astype('S')
            ep.times = (epochs['start'] / sampling_rate).rescale('s')
            ep.durations = ((epochs['stop'] - epochs['start'])
                            / sampling_rate).rescale('s')
        else:
            # BUGFIX: previously used ``triggers.size`` (variable left over
            # from the trigger loop above), not this zone's epoch count
            ep.lazy_shape = epochs.size
        seg.epocharrays.append(ep)

    seg.create_many_to_one_relationship()
    return seg
def read_one_channel_continuous(self, fid, channel_num, header,
                                take_ideal_sampling_rate, lazy=True):
    """Read one continuous (ADC/float) channel as a list of AnalogSignals.

    The on-disk channel is a linked list of data blocks; whenever the gap
    between two consecutive blocks exceeds one sample interval the data is
    split, so one channel may yield several AnalogSignals.

    :param fid: open file object positioned anywhere (seeks are absolute)
    :param channel_num: index into ``header.channelHeaders``
    :param header: parsed file header (provides rates, time bases)
    :param take_ideal_sampling_rate: if True use the header's ideal rate
        instead of deriving it from the divide/time-base fields
    :param lazy: if True, signals are empty and carry ``lazy_shape``
    :return: list of AnalogSignal (empty list if the channel has no block)
    """
    # read AnalogSignal
    channelHeader = header.channelHeaders[channel_num]

    # data type: kind 1 = int16 ADC data, kind 9 = float32
    # (other kinds are not expected here; ``dt`` would stay unset)
    if channelHeader.kind == 1:
        dt = np.dtype('i2')
    elif channelHeader.kind == 9:
        dt = np.dtype('f4')

    # sample rate
    if take_ideal_sampling_rate:
        sampling_rate = channelHeader.ideal_rate * pq.Hz
    else:
        if header.system_id in [1, 2, 3, 4, 5]:  # Before version 5
            #~ print channel_num, channelHeader.divide, \
            #~ header.us_per_time, header.time_per_adc
            sample_interval = (channelHeader.divide * header.us_per_time *
                               header.time_per_adc) * 1e-6
        else:
            sample_interval = (channelHeader.l_chan_dvd *
                               header.us_per_time * header.dtime_base)
        sampling_rate = (1. / sample_interval) * pq.Hz

    # read blocks header to preallocate memory by jumping block to block
    if channelHeader.blocks == 0:
        return []
    fid.seek(channelHeader.firstblock)
    # blocksize[k] = number of samples in the k-th contiguous run;
    # starttimes[k] = raw start time of that run (converted to s below)
    blocksize = [0]
    starttimes = []
    for b in range(channelHeader.blocks):
        blockHeader = HeaderReader(fid, np.dtype(blockHeaderDesciption))
        if len(blocksize) > len(starttimes):
            starttimes.append(blockHeader.start_time)
        blocksize[-1] += blockHeader.items

        if blockHeader.succ_block > 0:
            # ugly but CED does not guarantee continuity in AnalogSignal:
            # peek at the next block header to detect a gap
            fid.seek(blockHeader.succ_block)
            nextBlockHeader = HeaderReader(fid,
                                           np.dtype(blockHeaderDesciption))
            sample_interval = (blockHeader.end_time -
                               blockHeader.start_time) / \
                              (blockHeader.items - 1)
            interval_with_next = nextBlockHeader.start_time - \
                blockHeader.end_time
            if interval_with_next > sample_interval:
                # gap larger than one sample: start a new contiguous run
                blocksize.append(0)
            fid.seek(blockHeader.succ_block)

    ana_sigs = []
    # resolve the channel unit; fall back to dimensionless on failure
    if channelHeader.unit in unit_convert:
        unit = pq.Quantity(1, unit_convert[channelHeader.unit])
    else:
        # print channelHeader.unit
        try:
            unit = pq.Quantity(1, channelHeader.unit)
        except:
            unit = pq.Quantity(1, '')

    # preallocate one AnalogSignal per contiguous run
    for b, bs in enumerate(blocksize):
        if lazy:
            signal = [] * unit
        else:
            signal = pq.Quantity(np.empty(bs, dtype='f4'), units=unit)
        ana_sig = AnalogSignal(
            signal,
            sampling_rate=sampling_rate,
            t_start=(starttimes[b] * header.us_per_time *
                     header.dtime_base * pq.s),
            channel_index=channel_num)
        ana_sigs.append(ana_sig)

    if lazy:
        for s, ana_sig in enumerate(ana_sigs):
            ana_sig.lazy_shape = blocksize[s]
    else:
        # read data by jumping block to block, filling each preallocated
        # signal in order (``numblock``/``pos`` track the current run)
        fid.seek(channelHeader.firstblock)
        pos = 0
        numblock = 0
        for b in range(channelHeader.blocks):
            blockHeader = HeaderReader(
                fid, np.dtype(blockHeaderDesciption))
            # read data
            sig = np.fromstring(fid.read(blockHeader.items * dt.itemsize),
                                dtype=dt)
            ana_sigs[numblock][pos:pos + sig.size] = \
                sig.reshape(-1, 1).astype('f4') * unit
            pos += sig.size
            if pos >= blocksize[numblock]:
                numblock += 1
                pos = 0
            # jump to next block
            if blockHeader.succ_block > 0:
                fid.seek(blockHeader.succ_block)

    # convert for int16: apply the CED scale/offset calibration in place
    if dt.kind == 'i':
        for ana_sig in ana_sigs:
            ana_sig *= channelHeader.scale / 6553.6
            ana_sig += channelHeader.offset * unit

    return ana_sigs
def read_nsx(self, filename_nsx, seg, lazy, cascade):
    """Read one Blackrock .nsX continuous-data file into *seg*.

    Parses the basic header, the per-channel extended headers and the data
    packet, then appends one AnalogSignal per channel to
    ``seg.analogsignals``.

    :param filename_nsx: path to the .nsX file
    :param seg: neo Segment that receives the signals (annotated in place)
    :param lazy: if True, signals are empty and carry ``lazy_shape``
    :param cascade: if False, only the header annotations are applied

    Fix vs previous revision: the general (asymmetric-range) digital-to-
    physical conversion omitted the per-channel index ``[i]`` on
    ``min_analog_val`` and ``min_digital_val``, broadcasting whole arrays
    into the scale factor.
    """
    # basic header
    dt0 = [('header_id', 'S8'),
           ('ver_major', 'uint8'),
           ('ver_minor', 'uint8'),
           ('header_size', 'uint32'),  # i.e. index of first data
           ('group_label', 'S16'),  # number of packet bytes, i.e. byte per samplepos
           ('comments', 'S256'),
           ('period_ratio', 'uint32'),
           ('sampling_rate', 'uint32'),
           ('window_datetime', 'S16'),
           ('nb_channel', 'uint32'),
           ]
    nsx_header = h = np.fromfile(filename_nsx, count=1, dtype=dt0)[0]
    version = '{0}.{1}'.format(h['ver_major'], h['ver_minor'])
    seg.annotate(blackrock_version=version)
    seg.rec_datetime = get_window_datetime(nsx_header['window_datetime'])
    nb_channel = h['nb_channel']
    # effective rate = base clock divided by the period ratio
    sr = float(h['sampling_rate']) / h['period_ratio']

    if not cascade:
        return

    # extended header = channel information
    dt1 = [('header_id', 'S2'),
           ('channel_id', 'uint16'),
           ('label', 'S16'),
           ('connector_id', 'uint8'),
           ('connector_pin', 'uint8'),
           ('min_digital_val', 'int16'),
           ('max_digital_val', 'int16'),
           ('min_analog_val', 'int16'),
           ('max_analog_val', 'int16'),
           ('units', 'S16'),
           ('hi_freq_corner', 'uint32'),
           ('hi_freq_order', 'uint32'),
           ('hi_freq_type', 'uint16'),  # 0=None 1=Butterworth
           ('lo_freq_corner', 'uint32'),
           ('lo_freq_order', 'uint32'),
           ('lo_freq_type', 'uint16'),  # 0=None 1=Butterworth
           ]
    channels_header = ch = np.memmap(filename_nsx, shape=nb_channel,
                                     offset=np.dtype(dt0).itemsize,
                                     dtype=dt1)

    # read data packet header, then memory-map the interleaved samples
    dt2 = [('header_id', 'uint8'),
           ('n_start', 'uint32'),
           ('nb_sample', 'uint32'),
           ]
    sample_header = sh = np.memmap(filename_nsx, dtype=dt2, shape=1,
                                   offset=nsx_header['header_size'])[0]
    nb_sample = sample_header['nb_sample']
    data = np.memmap(filename_nsx, dtype='int16',
                     shape=(nb_sample, nb_channel),
                     offset=nsx_header['header_size'] +
                     np.dtype(dt2).itemsize)

    # create new objects
    for i in range(nb_channel):
        unit = channels_header['units'][i].decode()
        if lazy:
            sig = []
        else:
            sig = data[:, i].astype(float)
            # digital value to physical value
            if ch['max_analog_val'][i] == -ch['min_analog_val'][i] and \
                    ch['max_digital_val'][i] == -ch['min_digital_val'][i]:
                # when symmetric it is simple: pure scaling
                sig *= float(ch['max_analog_val'][i]) / \
                    float(ch['max_digital_val'][i])
            else:
                # general case: shift, scale, shift back
                # (BUGFIX: [i] was missing on both 'min_*' fields)
                sig -= ch['min_digital_val'][i]
                sig *= float(ch['max_analog_val'][i] -
                             ch['min_analog_val'][i]) / \
                    float(ch['max_digital_val'][i] -
                          ch['min_digital_val'][i])
                sig += float(ch['min_analog_val'][i])
        anasig = AnalogSignal(signal=pq.Quantity(sig, unit, copy=False),
                              sampling_rate=sr * pq.Hz,
                              t_start=sample_header['n_start'] / sr * pq.s,
                              name=str(ch['label'][i]),
                              channel_index=int(ch['channel_id'][i]))
        if lazy:
            anasig.lazy_shape = nb_sample
        seg.analogsignals.append(anasig)
def read_analogsignal(self, path=None, lazy=False, cascade=True):
    """Read one wave from an Igor binary (.ibw) or packed (.pxp) file.

    :param path: for .pxp files only -- a colon-separated Igor-style path
        to the wave inside the experiment (e.g. ``'root:folder:wave'``)
    :param lazy: if True the returned signal is empty (note: the `igor`
        module still loads the full data) and carries ``lazy_shape``
    :param cascade: unused here; kept for neo.io API compatibility
    :return: an AnalogSignal with units/timebase taken from the wave header
    :raises Exception: if the `igor` package is not installed
    :raises IOError: for unsupported .ibw format versions (> 5)
    """
    if not HAVE_IGOR:
        raise Exception(("`igor` package not installed. "
                         "Try `pip install igor`"))
    if self.extension == 'ibw':
        data = bw.load(self.filename)
        version = data['version']
        if version > 5:
            raise IOError(("Igor binary wave file format version {0} "
                           "is not supported.".format(version)))
    elif self.extension == 'pxp':
        assert type(path) is str, \
            "A colon-separated Igor-style path must be provided."
        _, filesystem = pxp.load(self.filename)
        # walk the experiment tree; keys are bytes in the igor filesystem
        path = path.split(':')
        location = filesystem['root']
        for element in path:
            if element != 'root':
                location = location[element.encode('utf8')]
        data = location.wave
    content = data['wave']
    if "padding" in content:
        assert content['padding'].size == 0, \
            "Cannot handle non-empty padding"
    if lazy:
        # not really lazy, since the `igor` module loads the data anyway
        signal = np.array((), dtype=content['wData'].dtype)
    else:
        signal = content['wData']
    note = content['note']
    header = content['wave_header']
    name = header['bname']
    units = "".join([x.decode() for x in header['dataUnits']])
    # time units default to seconds when absent/empty in the header
    try:
        time_units = "".join([x.decode() for x in header['xUnits']])
        assert len(time_units)
    except:
        time_units = "s"
    # 'hsB'/'hsA' exist in older header versions; newer ones use the
    # 'sfB'/'sfA' per-dimension arrays
    try:
        t_start = pq.Quantity(header['hsB'], time_units)
    except KeyError:
        t_start = pq.Quantity(header['sfB'][0], time_units)
    try:
        sampling_period = pq.Quantity(header['hsA'], time_units)
    except:
        sampling_period = pq.Quantity(header['sfA'][0], time_units)
    # self.parse_notes is an optional user-supplied callable; on failure
    # the raw note is kept as a plain annotation
    if self.parse_notes:
        try:
            annotations = self.parse_notes(note)
        except ValueError:
            warn("Couldn't parse notes field.")
            annotations = {'note': note}
    else:
        annotations = {'note': note}

    signal = AnalogSignal(signal, units=units, copy=False, t_start=t_start,
                          sampling_period=sampling_period, name=name,
                          file_origin=self.filename, **annotations)
    if lazy:
        signal.lazy_shape = content['wData'].shape
    return signal
def read_segment(self,
                 cascade=True,
                 lazy=False,
                 sampling_rate=1. * pq.Hz,
                 t_start=0. * pq.s,
                 unit=pq.V,
                 nbchannel=1,
                 bytesoffset=0,
                 dtype='f4',
                 rangemin=-10,
                 rangemax=10,
                 ):
    """
    Reading signal in a raw binary interleaved compact file.

    Arguments:
        sampling_rate :  sample rate
        t_start : time of the first sample sample of each channel
        unit: unit of AnalogSignal can be a str or directly a Quantities
        nbchannel : number of channel
        bytesoffset : nb of bytes offset at the start of file
        dtype : dtype of the data
        rangemin , rangemax : if the dtype is integer, range can give in
                              volt the min and the max of the range

    Fixes vs previous revision:
      * ``sig[:- sig.size%nbchannel]`` -- unary minus binds tighter than
        ``%`` in Python, so this evaluated ``(-sig.size) % nbchannel`` and
        truncated the array to a handful of samples; now parenthesized.
      * ``sig.size/nbchannel`` -> ``//`` (reshape needs ints on Python 3).
    """
    seg = Segment(file_origin=os.path.basename(self.filename))
    if not cascade:
        return seg

    dtype = np.dtype(dtype)

    # plain numbers are promoted to quantities with default units
    if type(sampling_rate) == float or type(sampling_rate) == int:
        # if not quantitities Hz by default
        sampling_rate = sampling_rate * pq.Hz
    if type(t_start) == float or type(t_start) == int:
        # if not quantitities s by default
        t_start = t_start * pq.s
    unit = pq.Quantity(1, unit)

    if not lazy:
        sig = np.memmap(self.filename, dtype=dtype, mode='r',
                        offset=bytesoffset)
        # drop trailing samples that do not fill a complete frame
        if sig.size % nbchannel != 0:
            sig = sig[:-(sig.size % nbchannel)]
        sig = sig.reshape((sig.size // nbchannel, nbchannel))
        if dtype.kind == 'i':
            # signed integers: map full range onto [rangemin, rangemax]
            sig = sig.astype('f')
            sig /= 2 ** (8 * dtype.itemsize)
            sig *= (rangemax - rangemin)
            sig += (rangemax + rangemin) / 2.
        elif dtype.kind == 'u':
            # unsigned integers: scale then shift up from zero
            sig = sig.astype('f')
            sig /= 2 ** (8 * dtype.itemsize)
            sig *= (rangemax - rangemin)
            sig += rangemin
        sig_with_units = pq.Quantity(sig, units=unit, copy=False)

    for i in range(nbchannel):
        if lazy:
            signal = [] * unit
        else:
            signal = sig_with_units[:, i]
        anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                              t_start=t_start, channel_index=i,
                              copy=False)
        if lazy:
            # TODO
            anaSig.lazy_shape = None
        seg.analogsignals.append(anaSig)

    seg.create_many_to_one_relationship()
    return seg
def read_segment(self, lazy=False, cascade=True):
    """Read an Elan recording (.eeg + .eeg.ent header + .pos triggers).

    The text .ent header supplies datetime, sampling rate, channel labels,
    types, units and logical/physical ranges; the binary .eeg file holds
    big-endian interleaved samples (nbchannel + 2 columns).

    Fixes vs previous revision:
      * lazy trigger times used ``pq.S`` (siemens) instead of ``pq.s``.
      * ``ea.lazy_shape`` was computed after ``times`` had been replaced
        by an empty quantity, so it was always 0; the count is now taken
        before the replacement.
      * ``data.size / (nbchannel + 2)`` -> ``//`` for the reshape.
      * regexes are raw strings (avoids invalid-escape warnings).
    """
    ## Read header file
    f = open(self.filename + '.ent', 'rU')

    # version: only V2 / V3 .ent headers are supported
    version = f.readline()
    if version[:2] != 'V2' and version[:2] != 'V3':
        # raise('read only V2 .eeg.ent files')
        raise VersionError('Read only V2 or V3 .eeg.ent files. %s given'
                           % version[:2])

    # info
    info1 = f.readline()[:-1]
    info2 = f.readline()[:-1]

    # strange 2 line for datetime: each line may be full datetime,
    # time-only or date-only; later lines override earlier fields
    # line1
    l = f.readline()
    r1 = re.findall(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', l)
    r2 = re.findall(r'(\d+):(\d+):(\d+)', l)
    r3 = re.findall(r'(\d+)-(\d+)-(\d+)', l)
    YY, MM, DD, hh, mm, ss = (None, ) * 6
    if len(r1) != 0:
        DD, MM, YY, hh, mm, ss = r1[0]
    elif len(r2) != 0:
        hh, mm, ss = r2[0]
    elif len(r3) != 0:
        DD, MM, YY = r3[0]

    # line2
    l = f.readline()
    r1 = re.findall(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', l)
    r2 = re.findall(r'(\d+):(\d+):(\d+)', l)
    r3 = re.findall(r'(\d+)-(\d+)-(\d+)', l)
    if len(r1) != 0:
        DD, MM, YY, hh, mm, ss = r1[0]
    elif len(r2) != 0:
        hh, mm, ss = r2[0]
    elif len(r3) != 0:
        DD, MM, YY = r3[0]
    try:
        fulldatetime = datetime.datetime(int(YY), int(MM), int(DD),
                                         int(hh), int(mm), int(ss))
    except:
        # incomplete or unparsable date fields
        fulldatetime = None

    seg = Segment(file_origin=os.path.basename(self.filename),
                  elan_version=version,
                  info1=info1,
                  info2=info2,
                  rec_datetime=fulldatetime,
                  )

    if not cascade:
        return seg

    # skip three unused header lines
    l = f.readline()
    l = f.readline()
    l = f.readline()

    # sampling rate sample (stored as the sampling interval in seconds)
    l = f.readline()
    sampling_rate = 1. / float(l) * pq.Hz

    # nb channel (file has 2 extra bookkeeping columns)
    l = f.readline()
    nbchannel = int(l) - 2

    # channel label
    labels = []
    for c in range(nbchannel + 2):
        labels.append(f.readline()[:-1])

    # channel type
    types = []
    for c in range(nbchannel + 2):
        types.append(f.readline()[:-1])

    # channel unit
    units = []
    for c in range(nbchannel + 2):
        units.append(f.readline()[:-1])
    # print units

    # range: physical and logical min/max per channel
    min_physic = []
    for c in range(nbchannel + 2):
        min_physic.append(float(f.readline()))
    max_physic = []
    for c in range(nbchannel + 2):
        max_physic.append(float(f.readline()))
    min_logic = []
    for c in range(nbchannel + 2):
        min_logic.append(float(f.readline()))
    max_logic = []
    for c in range(nbchannel + 2):
        max_logic.append(float(f.readline()))

    # info filter
    info_filter = []
    for c in range(nbchannel + 2):
        info_filter.append(f.readline()[:-1])

    f.close()

    # raw data: integer width inferred from the logical range
    n = int(round(np.log(max_logic[0] - min_logic[0]) / np.log(2)) / 8)
    data = np.fromfile(self.filename, dtype='i' + str(n))
    data = data.byteswap().reshape(
        (data.size // (nbchannel + 2), nbchannel + 2)).astype('f4')
    for c in range(nbchannel):
        if lazy:
            sig = []
        else:
            # map logical values onto the physical range
            sig = (data[:, c] - min_logic[c]) / \
                  (max_logic[c] - min_logic[c]) * \
                  (max_physic[c] - min_physic[c]) + min_physic[c]

        try:
            unit = pq.Quantity(1, units[c])
        except:
            unit = pq.Quantity(1, '')

        anaSig = AnalogSignal(sig * unit, sampling_rate=sampling_rate,
                              t_start=0. * pq.s, name=labels[c],
                              channel_index=c)
        if lazy:
            anaSig.lazy_shape = data.shape[0]
        anaSig.annotate(channel_name=labels[c])
        seg.analogsignals.append(anaSig)

    # triggers from the .pos sidecar file: "pos label reject_code" lines
    f = open(self.filename + '.pos')
    times = []
    labels = []
    reject_codes = []
    for l in f.readlines():
        r = re.findall(r' *(\d+) *(\d+) *(\d+) *', l)
        times.append(float(r[0][0]) / sampling_rate.magnitude)
        labels.append(str(r[0][1]))
        reject_codes.append(str(r[0][2]))
    # capture the count before the lazy path empties the list (BUGFIX)
    nb_triggers = len(times)
    if lazy:
        times = [] * pq.s  # BUGFIX: was pq.S (siemens)
        labels = np.array([], dtype='S')
        reject_codes = []
    else:
        times = np.array(times) * pq.s
        labels = np.array(labels)
        reject_codes = np.array(reject_codes)
    ea = EventArray(times=times, labels=labels,
                    reject_codes=reject_codes, )
    if lazy:
        ea.lazy_shape = nb_triggers
    seg.eventarrays.append(ea)
    f.close()

    seg.create_many_to_one_relationship()
    return seg
def read_segment(self,
                 lazy=False,
                 cascade=True,
                 delimiter='\t',
                 usecols=None,
                 skiprows=0,
                 timecolumn=None,
                 sampling_rate=1. * pq.Hz,
                 t_start=0. * pq.s,
                 unit=pq.V,
                 method='genfromtxt',
                 ):
    """
    Arguments:
        delimiter  :  columns delimiter in file  '\t' or one space or
                      two space or ',' or ';'
        usecols : if None take all columns otherwise a list for selected
                  columns
        skiprows : skip n first lines in case they contains header
                   informations
        timecolumn :  None or a valid int that point the time vector
        samplerate : the samplerate of signals if timecolumn is not None
                     this is not take in account
        t_start : time of the first sample
        unit : unit of AnalogSignal can be a str or directly a Quantities
        method :  'genfromtxt' or 'csv' or 'homemade'
                  in case of bugs you can try one of this methods
                    'genfromtxt' use numpy.genfromtxt
                    'csv' use cvs module
                    'homemade' use a intuitive more robust but slow method

    Fix vs previous revision: the 'csv' method used the Python 2-only
    ``file()`` builtin, which raises NameError on Python 3; replaced with
    the equivalent ``open()``.
    """
    seg = Segment(file_origin=os.path.basename(self.filename))
    if not cascade:
        return seg

    # plain numbers are promoted to quantities with default units
    if type(sampling_rate) == float or type(sampling_rate) == int:
        # if not quantitities Hz by default
        sampling_rate = sampling_rate * pq.Hz
    if type(t_start) == float or type(t_start) == int:
        # if not quantitities s by default
        t_start = t_start * pq.s
    unit = pq.Quantity(1, unit)

    # loadtxt
    if method == 'genfromtxt':
        sig = np.genfromtxt(self.filename,
                            delimiter=delimiter,
                            usecols=usecols,
                            skip_header=skiprows,
                            dtype='f')
        if len(sig.shape) == 1:
            sig = sig[:, np.newaxis]
    elif method == 'csv':
        # BUGFIX: was file(...), removed in Python 3
        tab = [l for l in csv.reader(open(self.filename, 'rU'),
                                     delimiter=delimiter)]
        tab = tab[skiprows:]
        sig = np.array(tab, dtype='f')
    elif method == 'homemade':
        fid = open(self.filename, 'rU')
        for l in range(skiprows):
            fid.readline()
        tab = []
        for line in fid.readlines():
            line = line.replace('\r', '')
            line = line.replace('\n', '')
            l = line.split(delimiter)
            # drop empty fields produced by repeated delimiters
            while '' in l:
                l.remove('')
            tab.append(l)
        sig = np.array(tab, dtype='f')

    if timecolumn is not None:
        # derive the time base from the time column itself
        sampling_rate = 1. / np.mean(np.diff(sig[:, timecolumn])) * pq.Hz
        t_start = sig[0, timecolumn] * pq.s

    for i in range(sig.shape[1]):
        if timecolumn == i:
            continue
        if usecols is not None and i not in usecols:
            continue

        if lazy:
            signal = [] * unit
        else:
            signal = sig[:, i] * unit

        anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                              t_start=t_start, channel_index=i,
                              name='Column %d' % i)
        if lazy:
            anaSig.lazy_shape = sig.shape
        seg.analogsignals.append(anaSig)

    seg.create_many_to_one_relationship()
    return seg
def _signal_da_to_neo(self, nix_da_group, lazy):
    """
    Convert a group of NIX DataArrays to a Neo signal. This method expects
    a list of data arrays that all represent the same, multidimensional
    Neo Signal object. This returns either an AnalogSignal or
    IrregularlySampledSignal.

    :param nix_da_group: a list of NIX DataArray objects
    :param lazy: if True, return an empty signal carrying ``lazy_shape``
    :return: a Neo Signal object, or None for unrecognized dimension types
    """
    # sort by name so channel order is deterministic
    nix_da_group = sorted(nix_da_group, key=lambda d: d.name)
    neo_attrs = self._nix_attr_to_neo(nix_da_group[0])
    metadata = nix_da_group[0].metadata
    neo_type = nix_da_group[0].type
    neo_attrs["nix_name"] = metadata.name  # use the common base name

    unit = nix_da_group[0].unit
    if lazy:
        signaldata = pq.Quantity(np.empty(0), unit)
        lazy_shape = (len(nix_da_group[0]), len(nix_da_group))
    else:
        # one DataArray per channel -> (time, channel) after transpose
        signaldata = np.array([d[:] for d in nix_da_group]).transpose()
        signaldata = pq.Quantity(signaldata, unit)
        lazy_shape = None
    timedim = self._get_time_dimension(nix_da_group[0])
    if (neo_type == "neo.analogsignal"
            or timedim.dimension_type == nix.DimensionType.Sample):
        if lazy:
            # placeholder timebase; real values are not needed when lazy
            sampling_period = pq.Quantity(1, timedim.unit)
            t_start = pq.Quantity(0, timedim.unit)
        else:
            # stored unit overrides (metadata) take precedence over the
            # dimension's own unit
            if "sampling_interval.units" in metadata.props:
                sample_units = metadata["sampling_interval.units"]
            else:
                sample_units = timedim.unit
            sampling_period = pq.Quantity(timedim.sampling_interval,
                                          sample_units)
            if "t_start.units" in metadata.props:
                tsunits = metadata["t_start.units"]
            else:
                tsunits = timedim.unit
            t_start = pq.Quantity(timedim.offset, tsunits)
        neo_signal = AnalogSignal(
            signal=signaldata, sampling_period=sampling_period,
            t_start=t_start, **neo_attrs
        )
    elif (neo_type == "neo.irregularlysampledsignal"
            or timedim.dimension_type == nix.DimensionType.Range):
        if lazy:
            times = pq.Quantity(np.empty(0), timedim.unit)
        else:
            times = pq.Quantity(timedim.ticks, timedim.unit)
        neo_signal = IrregularlySampledSignal(
            signal=signaldata, times=times, **neo_attrs
        )
    else:
        # neither sampled nor range dimension: cannot be converted
        return None
    # register every source DataArray as mapping to this one neo object
    for da in nix_da_group:
        self._neo_map[da.name] = neo_signal
    if lazy_shape:
        neo_signal.lazy_shape = lazy_shape
    return neo_signal
def read_segment(self, lazy=False, cascade=True):
    """Read a WinWCP (.wcp) file into a single neo Segment.

    The first 2048 bytes form a text header of ``KEY=value`` pairs
    (per-channel calibration YCF/YAG/YZ/YO/YU/YN plus global AD, DT,
    ADCMAX, NP, NC...); the rest is int16 sample data.

    Fixes vs previous revision:
      * ``header['NP']/header['NC']`` produced a float on Python 3 and is
        invalid as an ``np.memmap`` shape entry -> floor division.
      * the header file handle was never closed.
      * duplicate 'ADCMAX' removed from the integer-key list.
    """
    seg = Segment(
        file_origin=os.path.basename(self.filename),
    )
    if not cascade:
        return seg

    fid = open(self.filename, 'rb')
    headertext = fid.read(2048)
    fid.close()  # header is fully read; data is memmapped separately
    if PY3K:
        headertext = headertext.decode('ascii')
    header = {}
    for line in headertext.split('\r\n'):
        if '=' not in line:
            continue
        # print '#' , line , '#'
        key, val = line.split('=')
        if key in ['NC', 'NR', 'NBH', 'NBA', 'NBD', 'ADCMAX', 'NP', 'NZ']:
            val = int(val)
        elif key in ['AD', 'DT', ]:
            # decimal comma in some locales
            val = val.replace(',', '.')
            val = float(val)
        header[key] = val

    if not lazy:
        data = np.memmap(self.filename, np.dtype('i2'), 'r',
                         # shape = (header['NC'], header['NP']) ,
                         shape=(header['NP'] // header['NC'],
                                header['NC'], ),
                         offset=header['NBH'])

    for c in range(header['NC']):
        # per-channel calibration factors (decimal comma possible)
        YCF = float(header['YCF%d' % c].replace(',', '.'))
        YAG = float(header['YAG%d' % c].replace(',', '.'))
        YZ = float(header['YZ%d' % c].replace(',', '.'))

        ADCMAX = header['ADCMAX']
        AD = header['AD']
        DT = header['DT']

        # time unit may be milliseconds
        if 'TU' in header:
            if header['TU'] == 'ms':
                DT *= .001

        unit = header['YU%d' % c]
        try:
            unit = pq.Quantity(1., unit)
        except:
            unit = pq.Quantity(1., '')

        if lazy:
            signal = [] * unit
        else:
            # raw int16 -> physical units via gain/offset calibration
            signal = (data[:, header['YO%d' % c]].astype('f4') - YZ) * \
                AD / (YCF * YAG * (ADCMAX + 1)) * unit

        ana = AnalogSignal(signal,
                           sampling_rate=pq.Hz / DT,
                           t_start=0. * pq.s,
                           name=header['YN%d' % c],
                           channel_index=c)
        if lazy:
            ana.lazy_shape = header['NP'] // header['NC']

        seg.analogsignals.append(ana)

    seg.create_many_to_one_relationship()
    return seg
def _signal_da_to_neo(self, nix_da_group, lazy):
    """
    Convert a group of NIX DataArrays to a Neo signal. This method expects
    a list of data arrays that all represent the same, multidimensional
    Neo Signal object. This returns either an AnalogSignal or
    IrregularlySampledSignal.

    :param nix_da_group: a list of NIX DataArray objects
    :return: a Neo Signal object
    """
    # deterministic channel order: sort the group by DataArray name
    group = sorted(nix_da_group, key=lambda da: da.name)
    first = group[0]
    neo_attrs = self._nix_attr_to_neo(first)
    metadata = first.metadata
    neo_type = first.type
    # the common base name identifies the whole multi-channel signal
    neo_attrs["nix_name"] = metadata.name

    unit = first.unit
    if lazy:
        signaldata = pq.Quantity(np.empty(0), unit)
        lazy_shape = (len(first), len(group))
    else:
        stacked = np.array([da[:] for da in group]).transpose()
        signaldata = pq.Quantity(stacked, unit)
        lazy_shape = None

    timedim = self._get_time_dimension(first)
    dimtype = timedim.dimension_type

    if neo_type == "neo.analogsignal" or dimtype == nix.DimensionType.Sample:
        if lazy:
            # placeholder timebase; values are irrelevant when lazy
            sampling_period = pq.Quantity(1, timedim.unit)
            t_start = pq.Quantity(0, timedim.unit)
        else:
            # explicit unit overrides in metadata win over the dimension
            if "sampling_interval.units" in metadata.props:
                sample_units = metadata["sampling_interval.units"]
            else:
                sample_units = timedim.unit
            if "t_start.units" in metadata.props:
                tsunits = metadata["t_start.units"]
            else:
                tsunits = timedim.unit
            sampling_period = pq.Quantity(timedim.sampling_interval,
                                          sample_units)
            t_start = pq.Quantity(timedim.offset, tsunits)
        neo_signal = AnalogSignal(signal=signaldata,
                                  sampling_period=sampling_period,
                                  t_start=t_start,
                                  **neo_attrs)
    elif (neo_type == "neo.irregularlysampledsignal"
            or dimtype == nix.DimensionType.Range):
        if lazy:
            times = pq.Quantity(np.empty(0), timedim.unit)
        else:
            times = pq.Quantity(timedim.ticks, timedim.unit)
        neo_signal = IrregularlySampledSignal(signal=signaldata,
                                              times=times,
                                              **neo_attrs)
    else:
        # unrecognized dimension type: nothing to build
        return None

    # every source DataArray maps back to this single neo object
    for da in group:
        self._object_map[da.id] = neo_signal
    if lazy_shape:
        neo_signal.lazy_shape = lazy_shape
    return neo_signal
def read_block(self,
               lazy=False,
               cascade=True,
               ):
    """Read a TDT tank (directory) into a neo Block.

    Each sub-directory of the tank is one TDT "block" and becomes one
    Segment. The .tsq file is the global event index; actual samples live
    in the legacy .tev file or in per-stream .sev files. Events are
    grouped by (evtype, code, channel) into EventArrays, SpikeTrains and
    AnalogSignals.
    """
    bl = Block()
    tankname = os.path.basename(self.dirname)
    bl.file_origin = tankname
    if not cascade:
        return bl
    for blockname in os.listdir(self.dirname):
        if blockname == 'TempBlk':
            continue
        subdir = os.path.join(self.dirname, blockname)
        if not os.path.isdir(subdir):
            continue

        seg = Segment(name=blockname)
        bl.segments.append(seg)

        # TSQ is the global index
        tsq_filename = os.path.join(subdir,
                                    tankname + '_' + blockname + '.tsq')
        dt = [('size', 'int32'),
              ('evtype', 'int32'),
              ('code', 'S4'),
              ('channel', 'uint16'),
              ('sortcode', 'uint16'),
              ('timestamp', 'float64'),
              ('eventoffset', 'int64'),
              ('dataformat', 'int32'),
              ('frequency', 'float32'),
              ]
        tsq = np.fromfile(tsq_filename, dtype=dt)

        # 0x8801: 'EVTYPE_MARK' give the global_start
        global_t_start = tsq[tsq['evtype'] == 0x8801]['timestamp'][0]

        # TEV is the old data file
        if os.path.exists(os.path.join(subdir,
                                       tankname + '_' + blockname
                                       + '.tev')):
            tev_filename = os.path.join(subdir,
                                        tankname + '_' + blockname
                                        + '.tev')
            #tev_array = np.memmap(tev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
            tev_array = np.fromfile(tev_filename, dtype='uint8')
        else:
            tev_filename = None

        # iterate every (event type, code, channel) combination present
        for type_code, type_label in tdt_event_type:
            mask1 = tsq['evtype'] == type_code
            codes = np.unique(tsq[mask1]['code'])
            for code in codes:
                mask2 = mask1 & (tsq['code'] == code)
                channels = np.unique(tsq[mask2]['channel'])
                for channel in channels:
                    mask3 = mask2 & (tsq['channel'] == channel)

                    if type_label in ['EVTYPE_STRON', 'EVTYPE_STROFF']:
                        # digital on/off marks -> EventArray
                        if lazy:
                            times = [] * pq.s
                            labels = np.array([], dtype=str)
                        else:
                            times = (tsq[mask3]['timestamp']
                                     - global_t_start) * pq.s
                            # the label payload is stored in the offset
                            # field, reinterpreted as a float64
                            labels = tsq[mask3]['eventoffset'].view(
                                'float64').astype('S')
                        ea = EventArray(times=times,
                                        name=code,
                                        channel_index=int(channel),
                                        labels=labels)
                        if lazy:
                            ea.lazy_shape = np.sum(mask3)
                        seg.eventarrays.append(ea)

                    elif type_label == 'EVTYPE_SNIP':
                        # spike snippets -> one SpikeTrain per sort code
                        sortcodes = np.unique(tsq[mask3]['sortcode'])
                        for sortcode in sortcodes:
                            mask4 = mask3 & (tsq['sortcode'] == sortcode)
                            nb_spike = np.sum(mask4)
                            sr = tsq[mask4]['frequency'][0]
                            # 10 header words precede each waveform
                            waveformsize = tsq[mask4]['size'][0] - 10
                            if lazy:
                                times = [] * pq.s
                                waveforms = None
                            else:
                                times = (tsq[mask4]['timestamp']
                                         - global_t_start) * pq.s
                                dt = np.dtype(data_formats[
                                    tsq[mask3]['dataformat'][0]])
                                waveforms = get_chunks(
                                    tsq[mask4]['size'],
                                    tsq[mask4]['eventoffset'],
                                    tev_array).view(dt)
                                waveforms = waveforms.reshape(
                                    nb_spike, -1, waveformsize)
                                waveforms = waveforms * pq.mV
                            if nb_spike > 0:
                                # t_start = (tsq['timestamp'][0] - global_t_start) * pq.s # this hould work but not
                                t_start = 0 * pq.s
                                t_stop = (tsq['timestamp'][-1]
                                          - global_t_start) * pq.s
                            else:
                                t_start = 0 * pq.s
                                t_stop = 0 * pq.s
                            st = SpikeTrain(times=times,
                                            name='Chan{} Code{}'.format(
                                                channel, sortcode),
                                            t_start=t_start,
                                            t_stop=t_stop,
                                            waveforms=waveforms,
                                            left_sweep=waveformsize / 2.
                                            / sr * pq.s,
                                            sampling_rate=sr * pq.Hz,
                                            )
                            st.annotate(channel_index=channel)
                            if lazy:
                                st.lazy_shape = nb_spike
                            seg.spiketrains.append(st)

                    elif type_label == 'EVTYPE_STREAM':
                        # continuous stream -> AnalogSignal; samples come
                        # from a per-stream .sev file or fall back to .tev
                        dt = np.dtype(data_formats[
                            tsq[mask3]['dataformat'][0]])
                        shape = np.sum(tsq[mask3]['size'] - 10)
                        sr = tsq[mask3]['frequency'][0]
                        if lazy:
                            signal = []
                        else:
                            if PY3K:
                                signame = code.decode('ascii')
                            else:
                                signame = code
                            sev_filename = os.path.join(
                                subdir,
                                tankname + '_' + blockname + '_'
                                + signame + '_ch' + str(channel)
                                + '.sev')
                            if os.path.exists(sev_filename):
                                #sig_array = np.memmap(sev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
                                sig_array = np.fromfile(sev_filename,
                                                        dtype='uint8')
                            else:
                                sig_array = tev_array
                            signal = get_chunks(
                                tsq[mask3]['size'],
                                tsq[mask3]['eventoffset'],
                                sig_array).view(dt)

                        anasig = AnalogSignal(
                            signal=signal * pq.V,
                            name='{} {}'.format(code, channel),
                            sampling_rate=sr * pq.Hz,
                            t_start=(tsq[mask3]['timestamp'][0]
                                     - global_t_start) * pq.s,
                            channel_index=int(channel))
                        if lazy:
                            anasig.lazy_shape = shape
                        seg.analogsignals.append(anasig)
    bl.create_many_to_one_relationship()
    return bl
def read_segment(self, lazy=False, cascade=True):
    """Read a BrainVision recording into a single neo Segment.

    Expects the usual BrainVision triplet next to ``self.filename``
    (.vhdr header, .eeg multiplexed binary data, .vmrk markers). Only
    BINARY / MULTIPLEXED files in INT_16 or IEEE_FLOAT_32 are handled.
    """
    ## Read header file (vhdr)
    header = readBrainSoup(self.filename)

    assert header['Common Infos'][
        'DataFormat'] == 'BINARY', NotImplementedError
    assert header['Common Infos'][
        'DataOrientation'] == 'MULTIPLEXED', NotImplementedError
    nb_channel = int(header['Common Infos']['NumberOfChannels'])
    # SamplingInterval is stored in microseconds
    sampling_rate = 1.e6 / float(
        header['Common Infos']['SamplingInterval']) * pq.Hz

    fmt = header['Binary Infos']['BinaryFormat']
    fmts = {
        'INT_16': np.int16,
        'IEEE_FLOAT_32': np.float32,
    }
    assert fmt in fmts, NotImplementedError
    dt = fmts[fmt]

    seg = Segment(file_origin=os.path.basename(self.filename), )
    if not cascade:
        return seg

    # read binary (.eeg): samples are channel-interleaved
    if not lazy:
        binary_file = os.path.splitext(self.filename)[0] + '.eeg'
        sigs = np.memmap(
            binary_file,
            dt,
            'r',
        ).astype('f')

        # truncate any trailing partial frame before reshaping
        n = int(sigs.size / nb_channel)
        sigs = sigs[:n * nb_channel]
        sigs = sigs.reshape(n, nb_channel)

    for c in range(nb_channel):
        # header entry: "name,ref,resolution,units"
        name, ref, res, units = header['Channel Infos']['Ch%d' %
                                                        (c + 1, )].split(',')
        units = pq.Quantity(1, units.replace('µ', 'u'))
        if lazy:
            signal = [] * units
        else:
            signal = sigs[:, c] * units
        anasig = AnalogSignal(
            signal=signal,
            channel_index=c,
            name=name,
            sampling_rate=sampling_rate,
        )
        if lazy:
            anasig.lazy_shape = -1
        seg.analogsignals.append(anasig)

    # read marker (.vmrk): one EventArray per marker type
    marker_file = os.path.splitext(self.filename)[0] + '.vmrk'
    all_info = readBrainSoup(marker_file)['Marker Infos']
    all_types = []
    times = []
    labels = []
    for i in range(len(all_info)):
        # entry: "type,label,position,size,channel[,...]"
        type_, label, pos, size, channel = all_info['Mk%d' %
                                                    (i + 1, )].split(',')[:5]
        all_types.append(type_)
        times.append(float(pos) / sampling_rate.magnitude)
        labels.append(label)
    all_types = np.array(all_types)
    times = np.array(times) * pq.s
    labels = np.array(labels, dtype='S')
    for type_ in np.unique(all_types):
        ind = type_ == all_types
        if lazy:
            ea = EventArray(name=str(type_))
            ea.lazy_shape = -1
        else:
            ea = EventArray(
                times=times[ind],
                labels=labels[ind],
                name=str(type_),
            )
        seg.eventarrays.append(ea)

    # NOTE(review): function-style call, unlike the seg.create_many_to_one_
    # relationship() method used elsewhere in this file -- presumably the
    # old neo free function; verify it is imported in this module
    create_many_to_one_relationship(seg)
    return seg
def read_block(self,
               # the 2 first keyword arguments are imposed by neo.io API
               lazy = False, cascade = True):
    """
    Return a Block read from an alpha-omega file.

    :param lazy: if True, AnalogSignals are created empty and only their
        ``lazy_shape`` attribute is set.
    :param cascade: if False, only the main file header is read and a bare
        Block is returned.
    """
    def count_samples(m_length):
        """
        Count the number of signal samples available in a type 5
        data block of length m_length
        """
        # for information about type 5 data block, see [1]
        count = int((m_length-6)/2-2)
        # -6 corresponds to the header of block 5, and the -2 take into
        # account the fact that last 2 values are not available as the 4
        # corresponding bytes are coding the time stamp of the beginning
        # of the block
        return count

    # create the neo Block that will be returned at the end
    blck = Block(file_origin = os.path.basename(self.filename))
    blck.file_origin = os.path.basename(self.filename)

    fid = open(self.filename, 'rb')

    # NOTE: in the following, the word "block" is used in the sense used in
    # the alpha-omega specifications (ie a data chunk in the file), rather
    # than in the sense of the usual Block object in neo

    # step 1: read the headers of all the data blocks to load the file
    # structure
    pos_block = 0  # position of the current block in the file
    file_blocks = []  # list of data blocks available in the file

    if not cascade:
        # we read only the main header
        # each block starts with a 4-byte prefix: uint16 length, char type,
        # one pad byte ('Hcx')
        m_length, m_TypeBlock = struct.unpack('Hcx' , fid.read(4))
        # m_TypeBlock should be 'h', as we read the first block
        block = HeaderReader(fid,
                             dict_header_type.get(m_TypeBlock,
                                                  Type_Unknown)).read_f()
        block.update({'m_length': m_length,
                      'm_TypeBlock': m_TypeBlock,
                      'pos': pos_block})
        file_blocks.append(block)

    else:  # cascade == True
        seg = Segment(file_origin = os.path.basename(self.filename))
        seg.file_origin = os.path.basename(self.filename)
        blck.segments.append(seg)

        # scan the whole file, block by block
        while True:
            first_4_bytes = fid.read(4)
            if len(first_4_bytes) < 4:
                # we have reached the end of the file
                break
            else:
                m_length, m_TypeBlock = struct.unpack('Hcx', first_4_bytes)

            block = HeaderReader(fid,
                                 dict_header_type.get(m_TypeBlock,
                                                      Type_Unknown)).read_f()
            block.update({'m_length': m_length,
                          'm_TypeBlock': m_TypeBlock,
                          'pos': pos_block})

            if m_TypeBlock == '2':
                # The beginning of the block of type '2' is identical for
                # all types of channels, but the following part depends on
                # the type of channel. So we need a special case here.

                # WARNING: How to check the type of channel is not
                # described in the documentation. So here I use what is
                # proposed in the C code [2].
                # According to this C code, it seems that the 'm_isAnalog'
                # is used to distinguished analog and digital channels, and
                # 'm_Mode' encodes the type of analog channel:
                # 0 for continuous, 1 for level, 2 for external trigger.
                # But in some files, I found channels that seemed to be
                # continuous channels with 'm_Modes' = 128 or 192. So I
                # decided to consider every channel with 'm_Modes'
                # different from 1 or 2 as continuous. I also couldn't
                # check that values of 1 and 2 are really for level and
                # external trigger as I had no test files containing data
                # of this types.

                type_subblock = 'unknown_channel_type(m_Mode=' \
                    + str(block['m_Mode'])+ ')'
                description = Type2_SubBlockUnknownChannels
                block.update({'m_Name': 'unknown_name'})
                if block['m_isAnalog'] == 0:
                    # digital channel
                    type_subblock = 'digital'
                    description = Type2_SubBlockDigitalChannels
                elif block['m_isAnalog'] == 1:
                    # analog channel
                    if block['m_Mode'] == 1:
                        # level channel
                        type_subblock = 'level'
                        description = Type2_SubBlockLevelChannels
                    elif block['m_Mode'] == 2:
                        # external trigger channel
                        type_subblock = 'external_trigger'
                        description = Type2_SubBlockExtTriggerChannels
                    else:
                        # continuous channel
                        type_subblock = 'continuous(Mode' \
                            + str(block['m_Mode']) +')'
                        description = Type2_SubBlockContinuousChannels

                subblock = HeaderReader(fid, description).read_f()

                block.update(subblock)
                block.update({'type_subblock': type_subblock})

            file_blocks.append(block)
            pos_block += m_length
            fid.seek(pos_block)

        # step 2: find the available channels
        list_chan = []  # list containing indexes of channel blocks
        for ind_block, block in enumerate(file_blocks):
            if block['m_TypeBlock'] == '2':
                list_chan.append(ind_block)

        # step 3: find blocks containing data for the available channels
        list_data = []  # list of lists of indexes of data blocks
        # corresponding to each channel
        for ind_chan, chan in enumerate(list_chan):
            list_data.append([])
            num_chan = file_blocks[chan]['m_numChannel']
            for ind_block, block in enumerate(file_blocks):
                if block['m_TypeBlock'] == '5':
                    if block['m_numChannel'] == num_chan:
                        list_data[ind_chan].append(ind_block)

        # step 4: compute the length (number of samples) of the channels
        # NOTE(review): np.int is a deprecated alias of the builtin int
        # (removed in numpy >= 1.24) — consider plain int here.
        chan_len = np.zeros(len(list_data), dtype = np.int)
        for ind_chan, list_blocks in enumerate(list_data):
            for ind_block in list_blocks:
                chan_len[ind_chan] += count_samples(
                    file_blocks[ind_block]['m_length'])

        # step 5: find channels for which data are available
        ind_valid_chan = np.nonzero(chan_len)[0]

        # step 6: load the data
        # TODO give the possibility to load data as AnalogSignalArrays
        for ind_chan in ind_valid_chan:
            list_blocks = list_data[ind_chan]
            ind = 0  # index in the data vector

            # read time stamp for the beginning of the signal
            form = '<l'  # reading format: little-endian int32
            ind_block = list_blocks[0]
            count = count_samples(file_blocks[ind_block]['m_length'])
            # the time stamp sits after the 6-byte header and the samples
            fid.seek(file_blocks[ind_block]['pos']+6+count*2)
            buf = fid.read(struct.calcsize(form))
            val = struct.unpack(form , buf)
            start_index = val[0]

            # WARNING: in the following blocks are read supposing taht they
            # are all contiguous and sorted in time. I don't know if it's
            # always the case. Maybe we should use the time stamp of each
            # data block to choose where to put the read data in the array.
            if not lazy:
                temp_array = np.empty(chan_len[ind_chan], dtype = np.int16)
                # NOTE: we could directly create an empty AnalogSignal and
                # load the data in it, but it is much faster to load data
                # in a temporary numpy array and create the AnalogSignals
                # from this temporary array
                for ind_block in list_blocks:
                    count = count_samples(
                        file_blocks[ind_block]['m_length'])
                    fid.seek(file_blocks[ind_block]['pos']+6)
                    temp_array[ind:ind+count] = \
                        np.fromfile(fid, dtype = np.int16, count = count)
                    ind += count

            sampling_rate = \
                file_blocks[list_chan[ind_chan]]['m_SampleRate'] * pq.kHz
            t_start = (start_index / sampling_rate).simplified
            if lazy:
                ana_sig = AnalogSignal([],
                                       sampling_rate = sampling_rate,
                                       t_start = t_start,
                                       name = file_blocks\
                                           [list_chan[ind_chan]]['m_Name'],
                                       file_origin = \
                                           os.path.basename(self.filename),
                                       units = pq.dimensionless)
                ana_sig.lazy_shape = chan_len[ind_chan]
            else:
                ana_sig = AnalogSignal(temp_array,
                                       sampling_rate = sampling_rate,
                                       t_start = t_start,
                                       name = file_blocks\
                                           [list_chan[ind_chan]]['m_Name'],
                                       file_origin = \
                                           os.path.basename(self.filename),
                                       units = pq.dimensionless)
            # todo apibreak: create ChannelIndex for each signals
            #                ana_sig.channel_index = \
            #                    file_blocks[list_chan[ind_chan]]['m_numChannel']
            ana_sig.annotate(channel_name = \
                file_blocks[list_chan[ind_chan]]['m_Name'])
            ana_sig.annotate(channel_type = \
                file_blocks[list_chan[ind_chan]]['type_subblock'])
            seg.analogsignals.append(ana_sig)

    fid.close()

    if file_blocks[0]['m_TypeBlock'] == 'h':  # this should always be true
        blck.rec_datetime = datetime.datetime(\
            file_blocks[0]['m_date_year'],
            file_blocks[0]['m_date_month'],
            file_blocks[0]['m_date_day'],
            file_blocks[0]['m_time_hour'],
            file_blocks[0]['m_time_minute'],
            file_blocks[0]['m_time_second'],
            10000 * file_blocks[0]['m_time_hsecond'])
        # the 10000 is here to convert m_time_hsecond from centisecond
        # to microsecond
        version = file_blocks[0]['m_version']
        blck.annotate(alphamap_version = version)
        if cascade:
            seg.rec_datetime = blck.rec_datetime.replace()
            # I couldn't find a simple copy function for datetime,
            # using replace without arguments is a twisted way to make a
            # copy
            seg.annotate(alphamap_version = version)
    if cascade:
        blck.create_many_to_one_relationship()

    return blck
def read_segment(self, lazy=False, cascade=True): seg = Segment(file_origin=os.path.basename(self.filename), ) if not cascade: return seg fid = open(self.filename, 'rb') headertext = fid.read(2048) if PY3K: headertext = headertext.decode('ascii') header = {} for line in headertext.split('\r\n'): if '=' not in line: continue #print '#' , line , '#' key, val = line.split('=') if key in [ 'NC', 'NR', 'NBH', 'NBA', 'NBD', 'ADCMAX', 'NP', 'NZ', 'ADCMAX' ]: val = int(val) elif key in [ 'AD', 'DT', ]: val = val.replace(',', '.') val = float(val) header[key] = val if not lazy: data = np.memmap( self.filename, np.dtype('i2'), 'r', #shape = (header['NC'], header['NP']) , shape=( header['NP'] / header['NC'], header['NC'], ), offset=header['NBH']) for c in range(header['NC']): YCF = float(header['YCF%d' % c].replace(',', '.')) YAG = float(header['YAG%d' % c].replace(',', '.')) YZ = float(header['YZ%d' % c].replace(',', '.')) ADCMAX = header['ADCMAX'] AD = header['AD'] DT = header['DT'] if 'TU' in header: if header['TU'] == 'ms': DT *= .001 unit = header['YU%d' % c] try: unit = pq.Quantity(1., unit) except: unit = pq.Quantity(1., '') if lazy: signal = [] * unit else: signal = (data[:, header['YO%d' % c]].astype('f4') - YZ) * AD / (YCF * YAG * (ADCMAX + 1)) * unit ana = AnalogSignal(signal, sampling_rate=pq.Hz / DT, t_start=0. * pq.s, name=header['YN%d' % c], channel_index=c) if lazy: ana.lazy_shape = header['NP'] / header['NC'] seg.analogsignals.append(ana) create_many_to_one_relationship(seg) return seg
def read_segment(self, block_index=0, seg_index=0, lazy=False,
                 signal_group_mode=None, load_waveforms=False, time_slice=None):
    """
    :param block_index: int default 0. In case of several block
        block_index can be specified.

    :param seg_index: int default 0. Index of segment.

    :param lazy: False by default.

    :param signal_group_mode: 'split-all' or 'group-by-same-units' (default depend IO):
        This control behavior for grouping channels in AnalogSignal.
            * 'split-all': each channel will give an AnalogSignal
            * 'group-by-same-units' all channel sharing the same quantity units ar grouped in
              a 2D AnalogSignal

    :param load_waveforms: False by default. Control SpikeTrains.waveforms is None or not.

    :param time_slice: None by default means no limit.
        A time slice is (t_start, t_stop) both are quantities.
        All object AnalogSignal, SpikeTrain, Event, Epoch will load only in the slice.

    Fix relative to the previous version: the SpikeTrain name fallback
    indexed ``unit_channels['name'][c]`` with an undefined variable ``c``
    (NameError whenever a unit had no 'name' annotation); the loop variable
    is ``unit_index``.
    """
    if lazy:
        warnings.warn(
            "Lazy is deprecated and will be replaced by ProxyObject functionality.",
            DeprecationWarning)

    if signal_group_mode is None:
        signal_group_mode = self._prefered_signal_group_mode

    # annotations: copy the raw per-segment annotations, minus the
    # per-object sub-dicts which are consumed below
    seg_annotations = dict(self.raw_annotations['blocks'][block_index]['segments'][seg_index])
    for k in ('signals', 'units', 'events'):
        seg_annotations.pop(k)
    seg_annotations = check_annotations(seg_annotations)

    seg = Segment(index=seg_index, **seg_annotations)

    seg_t_start = self.segment_t_start(block_index, seg_index) * pq.s
    seg_t_stop = self.segment_t_stop(block_index, seg_index) * pq.s

    # get only a slice of objects limited by t_start and t_stop
    # time_slice = (t_start, t_stop)
    if time_slice is None:
        t_start, t_stop = None, None
        t_start_, t_stop_ = None, None
    else:
        assert not lazy, 'time slice only work when not lazy'
        t_start, t_stop = time_slice
        t_start = ensure_second(t_start)
        t_stop = ensure_second(t_stop)
        # clamp the requested slice to the segment limits
        if t_start < seg_t_start:
            t_start = seg_t_start
        if t_stop > seg_t_stop:
            t_stop = seg_t_stop
        # in float format in second (for rawio clip)
        t_start_, t_stop_ = float(t_start.magnitude), float(t_stop.magnitude)
        # new spiketrain limits
        seg_t_start = t_start
        seg_t_stop = t_stop

    # AnalogSignal
    signal_channels = self.header['signal_channels']

    if signal_channels.size > 0:
        channel_indexes_list = self.get_group_channel_indexes()
        for channel_indexes in channel_indexes_list:
            sr = self.get_signal_sampling_rate(channel_indexes) * pq.Hz
            sig_t_start = self.get_signal_t_start(
                block_index, seg_index, channel_indexes) * pq.s

            sig_size = self.get_signal_size(block_index=block_index, seg_index=seg_index,
                                            channel_indexes=channel_indexes)
            if not lazy:
                # in case of time_slice get: get i_start, i_stop, new sig_t_start
                if t_stop is not None:
                    i_stop = int((t_stop - sig_t_start).magnitude * sr.magnitude)
                    if i_stop > sig_size:
                        i_stop = sig_size
                else:
                    i_stop = None
                if t_start is not None:
                    i_start = int((t_start - sig_t_start).magnitude * sr.magnitude)
                    if i_start < 0:
                        i_start = 0
                    # the effective start moves to the first kept sample
                    sig_t_start += (i_start / sr).rescale('s')
                else:
                    i_start = None

                raw_signal = self.get_analogsignal_chunk(block_index=block_index,
                                                         seg_index=seg_index, i_start=i_start,
                                                         i_stop=i_stop,
                                                         channel_indexes=channel_indexes)
                float_signal = self.rescale_signal_raw_to_float(
                    raw_signal,
                    dtype='float32',
                    channel_indexes=channel_indexes)

            for i, (ind_within, ind_abs) in self._make_signal_channel_subgroups(
                    channel_indexes,
                    signal_group_mode=signal_group_mode).items():
                units = np.unique(signal_channels[ind_abs]['units'])
                assert len(units) == 1
                units = ensure_signal_units(units[0])

                if signal_group_mode == 'split-all':
                    # in that case annotations by channel is OK
                    chan_index = ind_abs[0]
                    d = self.raw_annotations['blocks'][block_index]['segments'][seg_index][
                        'signals'][chan_index]
                    annotations = dict(d)
                    if 'name' not in annotations:
                        annotations['name'] = signal_channels['name'][chan_index]
                else:
                    # when channel are grouped by same unit
                    # annotations have channel_names and channel_ids array
                    # this will be moved in array annotations soon
                    annotations = {}
                    annotations['name'] = 'Channel bundle ({}) '.format(
                        ','.join(signal_channels[ind_abs]['name']))
                    annotations['channel_names'] = signal_channels[ind_abs]['name']
                    annotations['channel_ids'] = signal_channels[ind_abs]['id']
                annotations = check_annotations(annotations)
                if lazy:
                    anasig = AnalogSignal(np.array([]), units=units, copy=False,
                                          sampling_rate=sr, t_start=sig_t_start,
                                          **annotations)
                    anasig.lazy_shape = (sig_size, len(ind_within))
                else:
                    anasig = AnalogSignal(float_signal[:, ind_within], units=units,
                                          copy=False, sampling_rate=sr,
                                          t_start=sig_t_start, **annotations)
                seg.analogsignals.append(anasig)

    # SpikeTrain and waveforms (optional)
    unit_channels = self.header['unit_channels']
    for unit_index in range(len(unit_channels)):
        if not lazy and load_waveforms:
            raw_waveforms = self.get_spike_raw_waveforms(block_index=block_index,
                                                         seg_index=seg_index,
                                                         unit_index=unit_index,
                                                         t_start=t_start_, t_stop=t_stop_)
            float_waveforms = self.rescale_waveforms_to_float(raw_waveforms, dtype='float32',
                                                              unit_index=unit_index)
            wf_units = ensure_signal_units(unit_channels['wf_units'][unit_index])
            waveforms = pq.Quantity(float_waveforms, units=wf_units,
                                    dtype='float32', copy=False)
            wf_sampling_rate = unit_channels['wf_sampling_rate'][unit_index]
            wf_left_sweep = unit_channels['wf_left_sweep'][unit_index]
            if wf_left_sweep > 0:
                wf_left_sweep = float(wf_left_sweep) / wf_sampling_rate * pq.s
            else:
                wf_left_sweep = None
            wf_sampling_rate = wf_sampling_rate * pq.Hz
        else:
            waveforms = None
            wf_left_sweep = None
            wf_sampling_rate = None

        d = self.raw_annotations['blocks'][block_index]['segments'][seg_index]['units'][
            unit_index]
        annotations = dict(d)
        if 'name' not in annotations:
            # FIX: was unit_channels['name'][c] with undefined `c`
            annotations['name'] = unit_channels['name'][unit_index]
        annotations = check_annotations(annotations)

        if not lazy:
            spike_timestamp = self.get_spike_timestamps(block_index=block_index,
                                                        seg_index=seg_index,
                                                        unit_index=unit_index,
                                                        t_start=t_start_, t_stop=t_stop_)
            spike_times = self.rescale_spike_timestamp(spike_timestamp, 'float64')
            sptr = SpikeTrain(spike_times, units='s', copy=False,
                              t_start=seg_t_start, t_stop=seg_t_stop,
                              waveforms=waveforms, left_sweep=wf_left_sweep,
                              sampling_rate=wf_sampling_rate, **annotations)
        else:
            nb = self.spike_count(block_index=block_index, seg_index=seg_index,
                                  unit_index=unit_index)
            sptr = SpikeTrain(np.array([]), units='s', copy=False, t_start=seg_t_start,
                              t_stop=seg_t_stop, **annotations)
            sptr.lazy_shape = (nb,)

        seg.spiketrains.append(sptr)

    # Events/Epoch
    event_channels = self.header['event_channels']
    for chan_ind in range(len(event_channels)):
        if not lazy:
            ev_timestamp, ev_raw_durations, ev_labels = self.get_event_timestamps(
                block_index=block_index, seg_index=seg_index,
                event_channel_index=chan_ind, t_start=t_start_, t_stop=t_stop_)
            ev_times = self.rescale_event_timestamp(ev_timestamp, 'float64') * pq.s
            if ev_raw_durations is None:
                ev_durations = None
            else:
                ev_durations = self.rescale_epoch_duration(ev_raw_durations,
                                                           'float64') * pq.s
            ev_labels = ev_labels.astype('S')
        else:
            nb = self.event_count(block_index=block_index, seg_index=seg_index,
                                  event_channel_index=chan_ind)
            lazy_shape = (nb,)
            ev_times = np.array([]) * pq.s
            ev_labels = np.array([], dtype='S')
            ev_durations = np.array([]) * pq.s

        d = self.raw_annotations['blocks'][block_index]['segments'][seg_index]['events'][
            chan_ind]
        annotations = dict(d)
        if 'name' not in annotations:
            annotations['name'] = event_channels['name'][chan_ind]
        annotations = check_annotations(annotations)

        if event_channels['type'][chan_ind] == b'event':
            e = Event(times=ev_times, labels=ev_labels, units='s', copy=False,
                      **annotations)
            e.segment = seg
            seg.events.append(e)
        elif event_channels['type'][chan_ind] == b'epoch':
            e = Epoch(times=ev_times, durations=ev_durations, labels=ev_labels,
                      units='s', copy=False, **annotations)
            e.segment = seg
            seg.epochs.append(e)
        if lazy:
            e.lazy_shape = lazy_shape

    seg.create_many_to_one_relationship()
    return seg
def read_segment(self, lazy=False, cascade=True):
    """Read one Segment from an Elan file triple: ``.ent`` text header,
    raw multiplexed ``.eeg`` data, and ``.pos`` trigger file.

    :param lazy: if True, signals/events are created empty and only
        ``lazy_shape`` is set.
    :param cascade: if False, return the bare Segment with no children.

    Fixes relative to the previous version:
      * lazy trigger times used ``pq.S`` (siemens) instead of ``pq.s``
        (seconds);
      * ``data.size / (nbchannel + 2)`` true division produced a float
        reshape dimension (TypeError on Python 3) — now floor division;
      * open mode ``'rU'`` was removed in Python 3.11 (``'r'`` already does
        universal newlines on Python 3);
      * file handles are managed with ``with``; bare ``except:`` narrowed.
    """
    # Read header file (.ent)
    with open(self.filename + '.ent', 'r') as f:
        # version
        version = f.readline()
        if version[:2] != 'V2' and version[:2] != 'V3':
            # raise('read only V2 .eeg.ent files')
            raise VersionError('Read only V2 or V3 .eeg.ent files. %s given' %
                               version[:2])

        # info
        info1 = f.readline()[:-1]
        info2 = f.readline()[:-1]

        # strange 2 line for datetime
        # line1
        l = f.readline()
        r1 = re.findall(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', l)
        r2 = re.findall(r'(\d+):(\d+):(\d+)', l)
        r3 = re.findall(r'(\d+)-(\d+)-(\d+)', l)
        YY, MM, DD, hh, mm, ss = (None, ) * 6
        if len(r1) != 0:
            DD, MM, YY, hh, mm, ss = r1[0]
        elif len(r2) != 0:
            hh, mm, ss = r2[0]
        elif len(r3) != 0:
            DD, MM, YY = r3[0]

        # line2 (may refine or replace the fields found on line1)
        l = f.readline()
        r1 = re.findall(r'(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)', l)
        r2 = re.findall(r'(\d+):(\d+):(\d+)', l)
        r3 = re.findall(r'(\d+)-(\d+)-(\d+)', l)
        if len(r1) != 0:
            DD, MM, YY, hh, mm, ss = r1[0]
        elif len(r2) != 0:
            hh, mm, ss = r2[0]
        elif len(r3) != 0:
            DD, MM, YY = r3[0]
        try:
            fulldatetime = datetime.datetime(int(YY), int(MM), int(DD),
                                             int(hh), int(mm), int(ss))
        except Exception:
            # incomplete or unparsable date fields
            fulldatetime = None

        seg = Segment(file_origin=os.path.basename(self.filename),
                      elan_version=version,
                      info1=info1,
                      info2=info2,
                      rec_datetime=fulldatetime)

        if not cascade:
            return seg

        l = f.readline()
        l = f.readline()
        l = f.readline()

        # sampling rate sample
        l = f.readline()
        sampling_rate = 1. / float(l) * pq.Hz

        # nb channel (the file stores 2 extra bookkeeping channels)
        l = f.readline()
        nbchannel = int(l) - 2

        # channel label
        labels = []
        for c in range(nbchannel + 2):
            labels.append(f.readline()[:-1])

        # channel type
        types = []
        for c in range(nbchannel + 2):
            types.append(f.readline()[:-1])

        # channel unit
        units = []
        for c in range(nbchannel + 2):
            units.append(f.readline()[:-1])
        # print units

        # physical and logical ranges, per channel
        min_physic = []
        for c in range(nbchannel + 2):
            min_physic.append(float(f.readline()))
        max_physic = []
        for c in range(nbchannel + 2):
            max_physic.append(float(f.readline()))
        min_logic = []
        for c in range(nbchannel + 2):
            min_logic.append(float(f.readline()))
        max_logic = []
        for c in range(nbchannel + 2):
            max_logic.append(float(f.readline()))

        # info filter
        info_filter = []
        for c in range(nbchannel + 2):
            info_filter.append(f.readline()[:-1])

    # raw data: integer byte-width inferred from the logical range
    n = int(round(np.log(max_logic[0] - min_logic[0]) / np.log(2)) / 8)
    data = np.fromfile(self.filename, dtype='i' + str(n))
    data = data.byteswap().reshape(
        (data.size // (nbchannel + 2), nbchannel + 2)).astype('f4')
    for c in range(nbchannel):
        if lazy:
            sig = []
        else:
            # map logical range onto physical range
            sig = (data[:, c] - min_logic[c]) / (
                max_logic[c] - min_logic[c]) * \
                (max_physic[c] - min_physic[c]) + min_physic[c]

        try:
            unit = pq.Quantity(1, units[c])
        except Exception:
            # unparsable unit string: fall back to dimensionless
            unit = pq.Quantity(1, '')

        ana_sig = AnalogSignal(
            sig * unit, sampling_rate=sampling_rate,
            t_start=0. * pq.s, name=labels[c], channel_index=c)
        if lazy:
            ana_sig.lazy_shape = data.shape[0]
        ana_sig.annotate(channel_name=labels[c])
        seg.analogsignals.append(ana_sig)

    # triggers (.pos file): one "sample label reject_code" line per trigger
    with open(self.filename + '.pos') as f:
        times = []
        labels = []
        reject_codes = []
        for l in f.readlines():
            r = re.findall(r' *(\d+) *(\d+) *(\d+) *', l)
            # trigger position is in samples; convert to seconds
            times.append(float(r[0][0]) / sampling_rate.magnitude)
            labels.append(str(r[0][1]))
            reject_codes.append(str(r[0][2]))
        if lazy:
            # FIX: was `[] * pq.S` (siemens); trigger times are seconds
            times = [] * pq.s
            labels = np.array([], dtype='S')
            reject_codes = []
        else:
            times = np.array(times) * pq.s
            labels = np.array(labels)
            reject_codes = np.array(reject_codes)
        ea = Event(times=times, labels=labels, reject_codes=reject_codes)
        if lazy:
            ea.lazy_shape = len(times)
        seg.events.append(ea)

    seg.create_many_to_one_relationship()
    return seg
def _read_segment(self, fobject, lazy):
    """
    Read a single segment with a single analogsignal

    Returns the segment, or False if there are no more segments in the
    file.  (Note: the sentinel is False, not None — callers rely on the
    falsy return value.)

    Fixes relative to the previous version: the deprecated aliases
    ``np.float`` (removed in numpy >= 1.24) and ``ndarray.tostring()``
    (removed in numpy >= 1.23) are replaced by ``float`` and ``tobytes()``,
    which are exact equivalents.
    """
    try:
        # float64 -- start time of the AnalogSignal
        t_start = np.fromfile(fobject, dtype=np.float64, count=1)[0]
    except IndexError:
        # if there are no more Segments, return
        return False

    # int16 -- index of the stimulus parameters
    seg_index = np.fromfile(fobject, dtype=np.int16, count=1)[0].tolist()

    # int16 -- number of stimulus parameters
    numelements = np.fromfile(fobject, dtype=np.int16, count=1)[0]

    # read the name strings for the stimulus parameters
    paramnames = []
    for _ in range(numelements):
        # unit8 -- the number of characters in the string
        numchars = np.fromfile(fobject, dtype=np.uint8, count=1)[0]

        # char * numchars -- a single name string
        name = np.fromfile(fobject, dtype=np.uint8, count=numchars)

        # exclude invalid characters (keep printable ASCII, codes >= 32)
        # NOTE(review): on Python 3 str(bytes) yields a "b'...'" repr
        # string; presumably this code originated on Python 2 — confirm
        # whether the names should instead be decoded to text.
        name = str(name[name >= 32].view("c").tobytes())

        # add the name to the list of names
        paramnames.append(name)

    # float32 * numelements -- the values for the stimulus parameters
    paramvalues = np.fromfile(fobject, dtype=np.float32, count=numelements)

    # combine parameter names and the parameters as a dict
    params = dict(zip(paramnames, paramvalues))

    # int32 -- the number elements in the AnalogSignal
    numpts = np.fromfile(fobject, dtype=np.int32, count=1)[0]

    # int16 * numpts -- the AnalogSignal itself
    signal = np.fromfile(fobject, dtype=np.int16, count=numpts)

    # handle lazy loading
    if lazy:
        sig = AnalogSignal([], t_start=t_start * pq.d,
                           file_origin=self._filename,
                           sampling_period=1.0 * pq.s,
                           units=pq.mV,
                           dtype=float,
                           )
        sig.lazy_shape = len(signal)
    else:
        sig = AnalogSignal(signal.astype(float) * pq.mV,
                           t_start=t_start * pq.d,
                           file_origin=self._filename,
                           sampling_period=1.0 * pq.s,
                           copy=False)
    # Note: setting the sampling_period to 1 s is arbitrary

    # load the AnalogSignal and parameters into a new Segment
    seg = Segment(file_origin=self._filename,
                  index=seg_index,
                  **params)
    seg.analogsignals = [sig]

    return seg
def read_segment(
        self,
        lazy=False,
        cascade=True,
):
    """Read one Segment from a NeuroExplorer (.nex) file.

    The file is a 544-byte global header followed by ``nvar`` 208-byte
    entity headers; each entity is dispatched on its ``type`` field:
    0 = neuron (spike times), 1 = event, 2 = interval, 3 = spiketrain with
    waveforms, 4 = population vector (ignored), 5 = continuous analog
    signal, 6 = markers.

    :param lazy: if True, data arrays are created empty and only
        ``lazy_shape`` is set.
    :param cascade: if False, return the bare Segment with no children.
    """
    fid = open(self.filename, 'rb')
    globalHeader = HeaderReader(fid, GlobalHeader).read_f(offset=0)

    #~ print globalHeader
    #~ print 'version' , globalHeader['version']
    seg = Segment()
    seg.file_origin = os.path.basename(self.filename)
    seg.annotate(neuroexplorer_version=globalHeader['version'])
    seg.annotate(comment=globalHeader['comment'])

    if not cascade:
        return seg

    offset = 544
    for i in range(globalHeader['nvar']):
        entityHeader = HeaderReader(
            fid, EntityHeader).read_f(offset=offset + i * 208)
        entityHeader['name'] = entityHeader['name'].replace('\x00', '')

        #print 'i',i, entityHeader['type']
        if entityHeader['type'] == 0:
            # neuron: n int32 timestamps in units of 1/freq seconds
            if lazy:
                spike_times = [] * pq.s
            else:
                spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entityHeader['n']),
                                        offset=entityHeader['offset'],
                                        )
                spike_times = spike_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
            sptr = SpikeTrain(
                times=spike_times,
                t_start=globalHeader['tbeg'] /
                globalHeader['freq'] * pq.s,
                t_stop=globalHeader['tend'] /
                globalHeader['freq'] * pq.s,
                name=entityHeader['name'],
            )
            if lazy:
                sptr.lazy_shape = entityHeader['n']
            sptr.annotate(channel_index=entityHeader['WireNumber'])
            seg.spiketrains.append(sptr)

        if entityHeader['type'] == 1:
            # event: n int32 timestamps, no per-event labels in the file
            if lazy:
                event_times = [] * pq.s
            else:
                event_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entityHeader['n']),
                                        offset=entityHeader['offset'],
                                        )
                event_times = event_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
            labels = np.array([''] * event_times.size, dtype='S')
            evar = EventArray(times=event_times, labels=labels,
                              channel_name=entityHeader['name'])
            if lazy:
                evar.lazy_shape = entityHeader['n']
            seg.eventarrays.append(evar)

        if entityHeader['type'] == 2:
            # interval: n int32 start timestamps followed by n int32 stops
            if lazy:
                start_times = [] * pq.s
                stop_times = [] * pq.s
            else:
                start_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entityHeader['n']),
                                        offset=entityHeader['offset'],
                                        )
                start_times = start_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
                stop_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                       shape=(entityHeader['n']),
                                       offset=entityHeader['offset'] +
                                       entityHeader['n'] * 4,
                                       )
                # NOTE(review): stops use astype('f') where starts use
                # 'f8' — probably unintentional precision loss; confirm.
                stop_times = stop_times.astype(
                    'f') / globalHeader['freq'] * pq.s
            epar = EpochArray(times=start_times,
                              durations=stop_times - start_times,
                              labels=np.array([''] * start_times.size,
                                              dtype='S'),
                              channel_name=entityHeader['name'])
            if lazy:
                epar.lazy_shape = entityHeader['n']
            seg.epocharrays.append(epar)

        if entityHeader['type'] == 3:
            # spiketrain and wavefoms: n int32 timestamps then the
            # (n, 1, NPointsWave) int16 waveform block
            if lazy:
                spike_times = [] * pq.s
                waveforms = None
            else:
                spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entityHeader['n']),
                                        offset=entityHeader['offset'],
                                        )
                spike_times = spike_times.astype(
                    'f8') / globalHeader['freq'] * pq.s

                waveforms = np.memmap(self.filename, np.dtype('i2'), 'r',
                                      shape=(entityHeader['n'], 1,
                                             entityHeader['NPointsWave']),
                                      offset=entityHeader['offset'] +
                                      entityHeader['n'] * 4,
                                      )
                # convert raw ADC counts to millivolts
                waveforms = (waveforms.astype('f') *
                             entityHeader['ADtoMV'] +
                             entityHeader['MVOffset']) * pq.mV
            t_stop = globalHeader['tend'] / globalHeader['freq'] * pq.s
            if spike_times.size > 0:
                # guard against spikes recorded past the nominal end time
                t_stop = max(t_stop, max(spike_times))
            sptr = SpikeTrain(
                times=spike_times,
                t_start=globalHeader['tbeg'] /
                globalHeader['freq'] * pq.s,
                #~ t_stop = max(globalHeader['tend']/globalHeader['freq']*pq.s,max(spike_times)),
                t_stop=t_stop,
                name=entityHeader['name'],
                waveforms=waveforms,
                sampling_rate=entityHeader['WFrequency'] * pq.Hz,
                left_sweep=0 * pq.ms,
            )
            if lazy:
                sptr.lazy_shape = entityHeader['n']
            sptr.annotate(channel_index=entityHeader['WireNumber'])
            seg.spiketrains.append(sptr)

        if entityHeader['type'] == 4:
            # popvectors: not supported
            pass

        if entityHeader['type'] == 5:
            # analog: fragment timestamps + fragment start indexes,
            # then NPointsWave int16 samples
            timestamps = np.memmap(self.filename, np.dtype('i4'), 'r',
                                   shape=(entityHeader['n']),
                                   offset=entityHeader['offset'],
                                   )
            timestamps = timestamps.astype('f8') / globalHeader['freq']
            # NOTE(review): this memmap uses the same offset as
            # `timestamps` — the fragment-start index array is usually
            # stored right after the timestamps (offset + n*4); also
            # fragmentStarts is divided by 'freq' here and again by
            # WFrequency below.  Both look suspicious — verify against
            # the NEX format specification before relying on t_start.
            fragmentStarts = np.memmap(self.filename, np.dtype('i4'), 'r',
                                       shape=(entityHeader['n']),
                                       offset=entityHeader['offset'],
                                       )
            fragmentStarts = fragmentStarts.astype(
                'f8') / globalHeader['freq']
            t_start = timestamps[0] - fragmentStarts[0] / float(
                entityHeader['WFrequency'])
            del timestamps, fragmentStarts

            if lazy:
                signal = [] * pq.mV
            else:
                signal = np.memmap(self.filename, np.dtype('i2'), 'r',
                                   shape=(entityHeader['NPointsWave']),
                                   offset=entityHeader['offset'],
                                   )
                signal = signal.astype('f')
                # convert raw ADC counts to millivolts
                signal *= entityHeader['ADtoMV']
                signal += entityHeader['MVOffset']
                signal = signal * pq.mV

            anaSig = AnalogSignal(
                signal=signal, t_start=t_start * pq.s,
                sampling_rate=entityHeader['WFrequency'] * pq.Hz,
                name=entityHeader['name'],
                channel_index=entityHeader['WireNumber'])
            if lazy:
                anaSig.lazy_shape = entityHeader['NPointsWave']
            seg.analogsignals.append(anaSig)

        if entityHeader['type'] == 6:
            # markers : TO TEST
            if lazy:
                times = [] * pq.s
                labels = np.array([], dtype='S')
                markertype = None
            else:
                times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                  shape=(entityHeader['n']),
                                  offset=entityHeader['offset'],
                                  )
                times = times.astype('f8') / globalHeader['freq'] * pq.s
                fid.seek(entityHeader['offset'] + entityHeader['n'] * 4)
                # NOTE(review): fid is opened in binary mode, so read()
                # returns bytes; replace('\x00', '') with str arguments
                # raises TypeError on Python 3 — confirm Py2-only usage.
                markertype = fid.read(64).replace('\x00', '')
                labels = np.memmap(
                    self.filename,
                    np.dtype('S' + str(entityHeader['MarkerLength'])), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'] +
                    entityHeader['n'] * 4 + 64)
            ea = EventArray(times=times,
                            labels=labels.view(np.ndarray),
                            name=entityHeader['name'],
                            channel_index=entityHeader['WireNumber'],
                            marker_type=markertype)
            if lazy:
                ea.lazy_shape = entityHeader['n']
            seg.eventarrays.append(ea)

    create_many_to_one_relationship(seg)
    return seg
def read_block(self,
               # the 2 first keyword arguments are imposed by neo.io API
               lazy=False, cascade=True):
    """
    Return a neo Block read from an alpha-omega file.

    :param lazy: if True, only the structure (shapes) of the signals is
        loaded; ``lazy_shape`` is set on each AnalogSignal instead of data.
    :param cascade: if False, only the main file header is read and an
        empty Block is returned.
    """

    def count_samples(m_length):
        """
        Count the number of signal samples available in a type 5 data
        block of length m_length.
        """
        # for information about type 5 data block, see [1]
        count = int((m_length - 6) / 2 - 2)
        # -6 corresponds to the header of block 5, and the -2 take into
        # account the fact that last 2 values are not available as the 4
        # corresponding bytes are coding the time stamp of the beginning
        # of the block
        return count

    # create the neo Block that will be returned at the end
    blck = Block(file_origin=os.path.basename(self.filename))
    blck.file_origin = os.path.basename(self.filename)

    fid = open(self.filename, 'rb')

    # NOTE: in the following, the word "block" is used in the sense used in
    # the alpha-omega specifications (ie a data chunk in the file), rather
    # than in the sense of the usual Block object in neo

    # step 1: read the headers of all the data blocks to load the file
    # structure
    pos_block = 0  # position of the current block in the file
    file_blocks = []  # list of data blocks available in the file

    if not cascade:
        # we read only the main header
        m_length, m_TypeBlock = struct.unpack('Hcx', fid.read(4))
        # m_TypeBlock should be 'h', as we read the first block
        block = HeaderReader(
            fid, dict_header_type.get(m_TypeBlock, Type_Unknown)).read_f()
        block.update({'m_length': m_length,
                      'm_TypeBlock': m_TypeBlock,
                      'pos': pos_block})
        file_blocks.append(block)
    else:  # cascade == True
        seg = Segment(file_origin=os.path.basename(self.filename))
        seg.file_origin = os.path.basename(self.filename)
        blck.segments.append(seg)

        while True:
            first_4_bytes = fid.read(4)
            if len(first_4_bytes) < 4:
                # we have reached the end of the file
                break
            else:
                m_length, m_TypeBlock = struct.unpack('Hcx', first_4_bytes)

            block = HeaderReader(
                fid, dict_header_type.get(m_TypeBlock, Type_Unknown)).read_f()
            block.update({'m_length': m_length,
                          'm_TypeBlock': m_TypeBlock,
                          'pos': pos_block})

            if m_TypeBlock == '2':
                # The beggining of the block of type '2' is identical for
                # all types of channels, but the following part depends on
                # the type of channel. So we need a special case here.

                # WARNING: How to check the type of channel is not
                # described in the documentation. So here I use what is
                # proposed in the C code [2].
                # According to this C code, it seems that the 'm_isAnalog'
                # is used to distinguished analog and digital channels, and
                # 'm_Mode' encodes the type of analog channel:
                # 0 for continuous, 1 for level, 2 for external trigger.
                # But in some files, I found channels that seemed to be
                # continuous channels with 'm_Modes' = 128 or 192. So I
                # decided to consider every channel with 'm_Modes'
                # different from 1 or 2 as continuous. I also couldn't
                # check that values of 1 and 2 are really for level and
                # external trigger as I had no test files containing data
                # of this types.
                type_subblock = 'unknown_channel_type(m_Mode=' \
                    + str(block['m_Mode']) + ')'
                description = Type2_SubBlockUnknownChannels
                block.update({'m_Name': 'unknown_name'})
                if block['m_isAnalog'] == 0:
                    # digital channel
                    type_subblock = 'digital'
                    description = Type2_SubBlockDigitalChannels
                elif block['m_isAnalog'] == 1:
                    # analog channel
                    if block['m_Mode'] == 1:
                        # level channel
                        type_subblock = 'level'
                        description = Type2_SubBlockLevelChannels
                    elif block['m_Mode'] == 2:
                        # external trigger channel
                        type_subblock = 'external_trigger'
                        description = Type2_SubBlockExtTriggerChannels
                    else:
                        # continuous channel
                        type_subblock = 'continuous(Mode' \
                            + str(block['m_Mode']) + ')'
                        description = Type2_SubBlockContinuousChannels

                subblock = HeaderReader(fid, description).read_f()
                block.update(subblock)
                block.update({'type_subblock': type_subblock})

            file_blocks.append(block)
            pos_block += m_length
            fid.seek(pos_block)

        # step 2: find the available channels
        list_chan = []  # list containing indexes of channel blocks
        for ind_block, block in enumerate(file_blocks):
            if block['m_TypeBlock'] == '2':
                list_chan.append(ind_block)

        # step 3: find blocks containing data for the available channels
        list_data = []  # list of lists of indexes of data blocks
        # corresponding to each channel
        for ind_chan, chan in enumerate(list_chan):
            list_data.append([])
            num_chan = file_blocks[chan]['m_numChannel']
            for ind_block, block in enumerate(file_blocks):
                if block['m_TypeBlock'] == '5':
                    if block['m_numChannel'] == num_chan:
                        list_data[ind_chan].append(ind_block)

        # step 4: compute the length (number of samples) of the channels
        # NOTE: dtype was np.int, an alias removed in NumPy >= 1.24;
        # the builtin int is the documented replacement.
        chan_len = np.zeros(len(list_data), dtype=int)
        for ind_chan, list_blocks in enumerate(list_data):
            for ind_block in list_blocks:
                chan_len[ind_chan] += count_samples(
                    file_blocks[ind_block]['m_length'])

        # step 5: find channels for which data are available
        ind_valid_chan = np.nonzero(chan_len)[0]

        # step 6: load the data
        # TODO give the possibility to load data as AnalogSignalArrays
        for ind_chan in ind_valid_chan:
            list_blocks = list_data[ind_chan]
            ind = 0  # index in the data vector

            # read time stamp for the beginning of the signal
            form = '<l'  # reading format
            ind_block = list_blocks[0]
            count = count_samples(file_blocks[ind_block]['m_length'])
            fid.seek(file_blocks[ind_block]['pos'] + 6 + count * 2)
            buf = fid.read(struct.calcsize(form))
            val = struct.unpack(form, buf)
            start_index = val[0]

            # WARNING: in the following blocks are read supposing taht they
            # are all contiguous and sorted in time. I don't know if it's
            # always the case. Maybe we should use the time stamp of each
            # data block to choose where to put the read data in the array.
            if not lazy:
                temp_array = np.empty(chan_len[ind_chan], dtype=np.int16)
                # NOTE: we could directly create an empty AnalogSignal and
                # load the data in it, but it is much faster to load data
                # in a temporary numpy array and create the AnalogSignals
                # from this temporary array
                for ind_block in list_blocks:
                    count = count_samples(
                        file_blocks[ind_block]['m_length'])
                    fid.seek(file_blocks[ind_block]['pos'] + 6)
                    temp_array[ind:ind + count] = \
                        np.fromfile(fid, dtype=np.int16, count=count)
                    ind += count

            sampling_rate = \
                file_blocks[list_chan[ind_chan]]['m_SampleRate'] * pq.kHz
            t_start = (start_index / sampling_rate).simplified
            if lazy:
                ana_sig = AnalogSignal(
                    [],
                    sampling_rate=sampling_rate,
                    t_start=t_start,
                    name=file_blocks[list_chan[ind_chan]]['m_Name'],
                    file_origin=os.path.basename(self.filename),
                    units=pq.dimensionless)
                ana_sig.lazy_shape = chan_len[ind_chan]
            else:
                ana_sig = AnalogSignal(
                    temp_array,
                    sampling_rate=sampling_rate,
                    t_start=t_start,
                    name=file_blocks[list_chan[ind_chan]]['m_Name'],
                    file_origin=os.path.basename(self.filename),
                    units=pq.dimensionless)
            ana_sig.channel_index = \
                file_blocks[list_chan[ind_chan]]['m_numChannel']
            ana_sig.annotate(
                channel_name=file_blocks[list_chan[ind_chan]]['m_Name'])
            ana_sig.annotate(
                channel_type=file_blocks[list_chan[ind_chan]]['type_subblock'])
            seg.analogsignals.append(ana_sig)

    fid.close()

    if file_blocks[0]['m_TypeBlock'] == 'h':  # this should always be true
        blck.rec_datetime = datetime.datetime(
            file_blocks[0]['m_date_year'],
            file_blocks[0]['m_date_month'],
            file_blocks[0]['m_date_day'],
            file_blocks[0]['m_time_hour'],
            file_blocks[0]['m_time_minute'],
            file_blocks[0]['m_time_second'],
            10000 * file_blocks[0]['m_time_hsecond'])
        # the 10000 is here to convert m_time_hsecond from centisecond
        # to microsecond
        version = file_blocks[0]['m_version']
        blck.annotate(alphamap_version=version)
        if cascade:
            seg.rec_datetime = blck.rec_datetime.replace()
            # I couldn't find a simple copy function for datetime,
            # using replace without arguments is a twisted way to make a
            # copy
            seg.annotate(alphamap_version=version)
    if cascade:
        populate_RecordingChannel(blck, remove_from_annotation=True)
        blck.create_many_to_one_relationship()

    return blck
def read_block(self , lazy = False, cascade = True, ): bl = Block( file_origin = os.path.basename(self.filename), ) if not cascade: return bl fid = open(self.filename , 'rb') headertext = fid.read(1024) if PY3K: headertext = headertext.decode('ascii') header = {} for line in headertext.split('\r\n'): if '=' not in line : continue #print '#' , line , '#' key,val = line.split('=') if key in ['NC', 'NR','NBH','NBA','NBD','ADCMAX','NP','NZ', ] : val = int(val) elif key in ['AD', 'DT', ] : val = val.replace(',','.') val = float(val) header[key] = val #print header SECTORSIZE = 512 # loop for record number for i in range(header['NR']): #print 'record ',i offset = 1024 + i*(SECTORSIZE*header['NBD']+1024) # read analysis zone analysisHeader = HeaderReader(fid , AnalysisDescription ).read_f(offset = offset) #print analysisHeader # read data NP = (SECTORSIZE*header['NBD'])/2 NP = NP - NP%header['NC'] NP = NP/header['NC'] if not lazy: data = np.memmap(self.filename , np.dtype('i2') , 'r', #shape = (header['NC'], header['NP']) , shape = (NP,header['NC'], ) , offset = offset+header['NBA']*SECTORSIZE) # create a segment seg = Segment() bl.segments.append(seg) for c in range(header['NC']): unit = header['YU%d'%c] try : unit = pq.Quantity(1., unit) except: unit = pq.Quantity(1., '') if lazy: signal = [ ] * unit else: YG = float(header['YG%d'%c].replace(',','.')) ADCMAX = header['ADCMAX'] VMax = analysisHeader['VMax'][c] signal = data[:,header['YO%d'%c]].astype('f4')*VMax/ADCMAX/YG * unit anaSig = AnalogSignal(signal, sampling_rate= pq.Hz / analysisHeader['SamplingInterval'] , t_start=analysisHeader['TimeRecorded'] * pq.s, name=header['YN%d'%c], channel_index=c) if lazy: anaSig.lazy_shape = NP seg.analogsignals.append(anaSig) fid.close() bl.create_many_to_one_relationship() return bl
def read_segment(self, block_index=0, seg_index=0, lazy=False,
                 signal_group_mode=None, load_waveforms=False, time_slice=None):
    """
    :param block_index: int default 0. In case of several block
        block_index can be specified.

    :param seg_index: int default 0. Index of segment.

    :param lazy: False by default.

    :param signal_group_mode: 'split-all' or 'group-by-same-units' (default depend IO):
        This control behavior for grouping channels in AnalogSignal.
            * 'split-all': each channel will give an AnalogSignal
            * 'group-by-same-units' all channel sharing the same quantity units ar grouped in
            a 2D AnalogSignal

    :param load_waveforms: False by default. Control SpikeTrains.waveforms is None or not.

    :param time_slice: None by default means no limit.
        A time slice is (t_start, t_stop) both are quantities.
        All object AnalogSignal, SpikeTrain, Event, Epoch will load only in the slice.
    """
    if signal_group_mode is None:
        signal_group_mode = self._prefered_signal_group_mode

    # annotations: segment-level annotations, minus the per-object ones
    seg_annotations = dict(
        self.raw_annotations['blocks'][block_index]['segments'][seg_index])
    for k in ('signals', 'units', 'events'):
        seg_annotations.pop(k)
    seg_annotations = check_annotations(seg_annotations)

    seg = Segment(index=seg_index, **seg_annotations)

    seg_t_start = self.segment_t_start(block_index, seg_index) * pq.s
    seg_t_stop = self.segment_t_stop(block_index, seg_index) * pq.s

    # get only a slice of objects limited by t_start and t_stop:
    # time_slice = (t_start, t_stop)
    if time_slice is None:
        t_start, t_stop = None, None
        t_start_, t_stop_ = None, None
    else:
        assert not lazy, 'time slice only work when not lazy'
        t_start, t_stop = time_slice

        t_start = ensure_second(t_start)
        t_stop = ensure_second(t_stop)

        # checks limits: clip the requested slice to the segment
        if t_start < seg_t_start:
            t_start = seg_t_start
        if t_stop > seg_t_stop:
            t_stop = seg_t_stop

        # in float format in second (for rawio clip)
        t_start_, t_stop_ = float(t_start.magnitude), float(t_stop.magnitude)

        # new spiketrain limits
        seg_t_start = t_start
        seg_t_stop = t_stop

    # AnalogSignal
    signal_channels = self.header['signal_channels']
    if signal_channels.size > 0:
        channel_indexes_list = self.get_group_channel_indexes()
        for channel_indexes in channel_indexes_list:
            sr = self.get_signal_sampling_rate(channel_indexes) * pq.Hz
            sig_t_start = self.get_signal_t_start(
                block_index, seg_index, channel_indexes) * pq.s

            sig_size = self.get_signal_size(block_index=block_index,
                                            seg_index=seg_index,
                                            channel_indexes=channel_indexes)
            if not lazy:
                # in case of time_slice get: get i_start, i_stop, new sig_t_start
                if t_stop is not None:
                    i_stop = int((t_stop - sig_t_start).magnitude * sr.magnitude)
                    if i_stop > sig_size:
                        i_stop = sig_size
                else:
                    i_stop = None
                if t_start is not None:
                    i_start = int((t_start - sig_t_start).magnitude * sr.magnitude)
                    if i_start < 0:
                        i_start = 0
                    # the signal now starts where the slice starts
                    sig_t_start += (i_start / sr).rescale('s')
                else:
                    i_start = None

                raw_signal = self.get_analogsignal_chunk(
                    block_index=block_index, seg_index=seg_index,
                    i_start=i_start, i_stop=i_stop,
                    channel_indexes=channel_indexes)
                float_signal = self.rescale_signal_raw_to_float(
                    raw_signal, dtype='float32',
                    channel_indexes=channel_indexes)

            for i, (ind_within, ind_abs) in self._make_signal_channel_subgroups(
                    channel_indexes,
                    signal_group_mode=signal_group_mode).items():
                units = np.unique(signal_channels[ind_abs]['units'])
                assert len(units) == 1
                units = ensure_signal_units(units[0])

                if signal_group_mode == 'split-all':
                    # in that case annotations by channel is OK
                    chan_index = ind_abs[0]
                    d = self.raw_annotations['blocks'][block_index][
                        'segments'][seg_index]['signals'][chan_index]
                    annotations = dict(d)
                    if 'name' not in annotations:
                        annotations['name'] = signal_channels['name'][chan_index]
                else:
                    # when channel are grouped by same unit
                    # annotations are empty...
                    annotations = {}
                    annotations['name'] = 'Channel bundle ({}) '.format(
                        ','.join(signal_channels[ind_abs]['name']))
                annotations = check_annotations(annotations)
                if lazy:
                    anasig = AnalogSignal(np.array([]), units=units, copy=False,
                                          sampling_rate=sr, t_start=sig_t_start,
                                          **annotations)
                    anasig.lazy_shape = (sig_size, len(ind_within))
                else:
                    anasig = AnalogSignal(float_signal[:, ind_within], units=units,
                                          copy=False, sampling_rate=sr,
                                          t_start=sig_t_start, **annotations)
                seg.analogsignals.append(anasig)

    # SpikeTrain and waveforms (optional)
    unit_channels = self.header['unit_channels']
    for unit_index in range(len(unit_channels)):
        if not lazy and load_waveforms:
            raw_waveforms = self.get_spike_raw_waveforms(
                block_index=block_index, seg_index=seg_index,
                unit_index=unit_index, t_start=t_start_, t_stop=t_stop_)
            float_waveforms = self.rescale_waveforms_to_float(
                raw_waveforms, dtype='float32', unit_index=unit_index)
            wf_units = ensure_signal_units(
                unit_channels['wf_units'][unit_index])
            waveforms = pq.Quantity(float_waveforms, units=wf_units,
                                    dtype='float32', copy=False)
            wf_sampling_rate = unit_channels['wf_sampling_rate'][unit_index]
            wf_left_sweep = unit_channels['wf_left_sweep'][unit_index]
            if wf_left_sweep > 0:
                wf_left_sweep = float(wf_left_sweep) / wf_sampling_rate * pq.s
            else:
                wf_left_sweep = None
            wf_sampling_rate = wf_sampling_rate * pq.Hz
        else:
            waveforms = None
            wf_left_sweep = None
            wf_sampling_rate = None

        d = self.raw_annotations['blocks'][block_index]['segments'][
            seg_index]['units'][unit_index]
        annotations = dict(d)
        if 'name' not in annotations:
            # BUGFIX: was unit_channels['name'][c] with an undefined `c`
            # (NameError whenever a unit had no 'name' annotation);
            # unit_index is the loop variable, matching the signal and
            # event branches above/below.
            annotations['name'] = unit_channels['name'][unit_index]
        annotations = check_annotations(annotations)

        if not lazy:
            spike_timestamp = self.get_spike_timestamps(
                block_index=block_index, seg_index=seg_index,
                unit_index=unit_index, t_start=t_start_, t_stop=t_stop_)
            spike_times = self.rescale_spike_timestamp(spike_timestamp, 'float64')
            sptr = SpikeTrain(spike_times, units='s', copy=False,
                              t_start=seg_t_start, t_stop=seg_t_stop,
                              waveforms=waveforms, left_sweep=wf_left_sweep,
                              sampling_rate=wf_sampling_rate, **annotations)
        else:
            nb = self.spike_count(block_index=block_index, seg_index=seg_index,
                                  unit_index=unit_index)
            sptr = SpikeTrain(np.array([]), units='s', copy=False,
                              t_start=seg_t_start, t_stop=seg_t_stop,
                              **annotations)
            sptr.lazy_shape = (nb, )

        seg.spiketrains.append(sptr)

    # Events/Epoch
    event_channels = self.header['event_channels']
    for chan_ind in range(len(event_channels)):
        if not lazy:
            ev_timestamp, ev_raw_durations, ev_labels = self.get_event_timestamps(
                block_index=block_index, seg_index=seg_index,
                event_channel_index=chan_ind, t_start=t_start_, t_stop=t_stop_)
            ev_times = self.rescale_event_timestamp(ev_timestamp, 'float64') * pq.s
            if ev_raw_durations is None:
                ev_durations = None
            else:
                ev_durations = self.rescale_epoch_duration(
                    ev_raw_durations, 'float64') * pq.s
            ev_labels = ev_labels.astype('S')
        else:
            nb = self.event_count(block_index=block_index, seg_index=seg_index,
                                  event_channel_index=chan_ind)
            lazy_shape = (nb, )
            ev_times = np.array([]) * pq.s
            ev_labels = np.array([], dtype='S')
            ev_durations = np.array([]) * pq.s

        d = self.raw_annotations['blocks'][block_index]['segments'][
            seg_index]['events'][chan_ind]
        annotations = dict(d)
        if 'name' not in annotations:
            annotations['name'] = event_channels['name'][chan_ind]
        annotations = check_annotations(annotations)

        if event_channels['type'][chan_ind] == b'event':
            e = Event(times=ev_times, labels=ev_labels, units='s',
                      copy=False, **annotations)
            e.segment = seg
            seg.events.append(e)
        elif event_channels['type'][chan_ind] == b'epoch':
            e = Epoch(times=ev_times, durations=ev_durations, labels=ev_labels,
                      units='s', copy=False, **annotations)
            e.segment = seg
            seg.epochs.append(e)
        if lazy:
            e.lazy_shape = lazy_shape

    seg.create_many_to_one_relationship()
    return seg
def read_segment(self, blockname=None, lazy=False, cascade=True, sortname=''):
    """
    Read a single segment from the tank. Note that TDT blocks are Neo
    segments, and TDT tanks are Neo blocks, so here the 'blockname' argument
    refers to the TDT block's name, which will be the Neo segment name.
    sortname is used to specify the external sortcode generated by offline spike sorting,
    if sortname=='PLX', there should be a ./sort/PLX/*.SortResult file in the tdt block,
    which stores the sortcode for every spike, default to '', which uses the original
    online sort
    """
    if not blockname:
        blockname = os.listdir(self.dirname)[0]

    if blockname == 'TempBlk':
        return None

    if not self.is_tdtblock(blockname):
        return None  # if not a tdt block

    subdir = os.path.join(self.dirname, blockname)
    if not os.path.isdir(subdir):
        return None

    seg = Segment(name=blockname)

    tankname = os.path.basename(self.dirname)

    # TSQ is the global index
    tsq_filename = os.path.join(subdir, tankname + '_' + blockname + '.tsq')
    dt = [('size', 'int32'),
          ('evtype', 'int32'),
          ('code', 'S4'),
          ('channel', 'uint16'),
          ('sortcode', 'uint16'),
          ('timestamp', 'float64'),
          ('eventoffset', 'int64'),
          ('dataformat', 'int32'),
          ('frequency', 'float32'),
          ]
    tsq = np.fromfile(tsq_filename, dtype=dt)

    # 0x8801: 'EVTYPE_MARK' give the global_start
    global_t_start = tsq[tsq['evtype'] == 0x8801]['timestamp'][0]

    # TEV is the old data file
    try:
        tev_filename = os.path.join(subdir, tankname + '_' + blockname + '.tev')
        # tev_array = np.memmap(tev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
        tev_array = np.fromfile(tev_filename, dtype='uint8')
    except IOError:
        tev_filename = None

    # if exists an external sortcode in ./sort/[sortname]/*.SortResult
    # (generated after offline sortting)
    sortresult_filename = None
    # BUGFIX: was `sortname is not ''` — identity comparison with a string
    # literal is implementation-dependent and raises SyntaxWarning; use !=.
    if sortname != '':
        try:
            for file in os.listdir(os.path.join(subdir, 'sort', sortname)):
                if file.endswith(".SortResult"):
                    sortresult_filename = os.path.join(subdir, 'sort',
                                                       sortname, file)

                    # get new sortcode
                    newsorcode = np.fromfile(sortresult_filename, 'int8')[1024:]
                    # the first 1024 byte is file header
                    # update the sort code with the info from this file
                    tsq['sortcode'][1:-1] = newsorcode
                    # print('sortcode updated')
                    break
        except OSError:
            sortresult_filename = None
        except IOError:
            sortresult_filename = None

    for type_code, type_label in tdt_event_type:
        mask1 = tsq['evtype'] == type_code
        codes = np.unique(tsq[mask1]['code'])

        for code in codes:
            mask2 = mask1 & (tsq['code'] == code)
            channels = np.unique(tsq[mask2]['channel'])

            for channel in channels:
                mask3 = mask2 & (tsq['channel'] == channel)

                if type_label in ['EVTYPE_STRON', 'EVTYPE_STROFF']:
                    if lazy:
                        times = [] * pq.s
                        labels = np.array([], dtype=str)
                    else:
                        times = (tsq[mask3]['timestamp'] - global_t_start) * pq.s
                        labels = tsq[mask3]['eventoffset'].view('float64').astype('S')
                    ea = Event(times=times,
                               name=code,
                               channel_index=int(channel),
                               labels=labels)
                    if lazy:
                        ea.lazy_shape = np.sum(mask3)
                    seg.events.append(ea)

                elif type_label == 'EVTYPE_SNIP':
                    sortcodes = np.unique(tsq[mask3]['sortcode'])
                    for sortcode in sortcodes:
                        mask4 = mask3 & (tsq['sortcode'] == sortcode)
                        nb_spike = np.sum(mask4)
                        sr = tsq[mask4]['frequency'][0]
                        waveformsize = tsq[mask4]['size'][0] - 10
                        if lazy:
                            times = [] * pq.s
                            waveforms = None
                        else:
                            times = (tsq[mask4]['timestamp'] - global_t_start) * pq.s
                            dt = np.dtype(data_formats[tsq[mask3]['dataformat'][0]])
                            waveforms = get_chunks(tsq[mask4]['size'],
                                                   tsq[mask4]['eventoffset'],
                                                   tev_array).view(dt)
                            waveforms = waveforms.reshape(nb_spike, -1, waveformsize)
                            waveforms = waveforms * pq.mV
                        if nb_spike > 0:
                            # t_start = (tsq['timestamp'][0] - global_t_start) * pq.s
                            # this should work but does not
                            t_start = 0 * pq.s
                            t_stop = (tsq['timestamp'][-1] - global_t_start) * pq.s
                        else:
                            t_start = 0 * pq.s
                            t_stop = 0 * pq.s
                        st = SpikeTrain(times=times,
                                        name='Chan{0} Code{1}'.format(channel, sortcode),
                                        t_start=t_start,
                                        t_stop=t_stop,
                                        waveforms=waveforms,
                                        left_sweep=waveformsize / 2. / sr * pq.s,
                                        sampling_rate=sr * pq.Hz,
                                        )
                        st.annotate(channel_index=channel)
                        if lazy:
                            st.lazy_shape = nb_spike
                        seg.spiketrains.append(st)

                elif type_label == 'EVTYPE_STREAM':
                    dt = np.dtype(data_formats[tsq[mask3]['dataformat'][0]])
                    shape = np.sum(tsq[mask3]['size'] - 10)
                    sr = tsq[mask3]['frequency'][0]
                    if lazy:
                        signal = []
                    else:
                        if PY3K:
                            signame = code.decode('ascii')
                        else:
                            signame = code
                        sev_filename = os.path.join(
                            subdir,
                            tankname + '_' + blockname + '_' + signame
                            + '_ch' + str(channel) + '.sev')
                        try:
                            # sig_array = np.memmap(sev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
                            sig_array = np.fromfile(sev_filename, dtype='uint8')
                        except IOError:
                            sig_array = tev_array
                        signal = get_chunks(tsq[mask3]['size'],
                                            tsq[mask3]['eventoffset'],
                                            sig_array).view(dt)

                    anasig = AnalogSignal(
                        signal=signal * pq.V,
                        name='{0} {1}'.format(code, channel),
                        sampling_rate=sr * pq.Hz,
                        t_start=(tsq[mask3]['timestamp'][0] - global_t_start) * pq.s,
                        channel_index=int(channel))
                    if lazy:
                        anasig.lazy_shape = shape
                    seg.analogsignals.append(anasig)

    return seg
def read_block(self, lazy=False, cascade=True):
    """
    Read an Axon ABF file and return a neo Block.

    One sweep (episode) of the recording becomes one Segment; each ADC
    channel of a sweep becomes one AnalogSignal. Tags (modes 3 and 5)
    are attached as an Event to the first Segment.
    """
    header = self.read_header()
    version = header['fFileVersionNumber']

    bl = Block()
    bl.file_origin = os.path.basename(self.filename)
    bl.annotate(abf_version=str(version))

    # date and time
    if version < 2.:
        YY = 1900
        MM = 1
        DD = 1
        hh = int(header['lFileStartTime'] / 3600.)
        mm = int((header['lFileStartTime'] - hh * 3600) / 60)
        ss = header['lFileStartTime'] - hh * 3600 - mm * 60
        ms = int(np.mod(ss, 1) * 1e6)
        ss = int(ss)
    elif version >= 2.:
        YY = int(header['uFileStartDate'] / 10000)
        MM = int((header['uFileStartDate'] - YY * 10000) / 100)
        DD = int(header['uFileStartDate'] - YY * 10000 - MM * 100)
        hh = int(header['uFileStartTimeMS'] / 1000. / 3600.)
        mm = int((header['uFileStartTimeMS'] / 1000. - hh * 3600) / 60)
        ss = header['uFileStartTimeMS'] / 1000. - hh * 3600 - mm * 60
        ms = int(np.mod(ss, 1) * 1e6)
        ss = int(ss)
    bl.rec_datetime = datetime.datetime(YY, MM, DD, hh, mm, ss, ms)

    if not cascade:
        return bl

    # file format
    if header['nDataFormat'] == 0:
        dt = np.dtype('i2')
    elif header['nDataFormat'] == 1:
        dt = np.dtype('f4')

    if version < 2.:
        nbchannel = header['nADCNumChannels']
        head_offset = header['lDataSectionPtr'] * BLOCKSIZE + header[
            'nNumPointsIgnored'] * dt.itemsize
        totalsize = header['lActualAcqLength']
    elif version >= 2.:
        nbchannel = header['sections']['ADCSection']['llNumEntries']
        head_offset = header['sections']['DataSection'][
            'uBlockIndex'] * BLOCKSIZE
        totalsize = header['sections']['DataSection']['llNumEntries']

    data = np.memmap(self.filename, dt, 'r',
                     shape=(totalsize, ), offset=head_offset)

    # 3 possible modes
    if version < 2.:
        mode = header['nOperationMode']
    elif version >= 2.:
        mode = header['protocol']['nOperationMode']

    if (mode == 1) or (mode == 2) or (mode == 5) or (mode == 3):
        # event-driven variable-length mode (mode 1)
        # event-driven fixed-length mode (mode 2 or 5)
        # gap free mode (mode 3) can be in several episodes

        # read sweep pos
        if version < 2.:
            nbepisod = header['lSynchArraySize']
            offset_episode = header['lSynchArrayPtr'] * BLOCKSIZE
        elif version >= 2.:
            nbepisod = header['sections']['SynchArraySection'][
                'llNumEntries']
            offset_episode = header['sections']['SynchArraySection'][
                'uBlockIndex'] * BLOCKSIZE
        if nbepisod > 0:
            episode_array = np.memmap(self.filename,
                                      [('offset', 'i4'), ('len', 'i4')], 'r',
                                      shape=nbepisod, offset=offset_episode)
        else:
            episode_array = np.empty(1, [('offset', 'i4'), ('len', 'i4')])
            episode_array[0]['len'] = data.size
            episode_array[0]['offset'] = 0

        # sampling_rate
        if version < 2.:
            sampling_rate = 1. / (header['fADCSampleInterval'] *
                                  nbchannel * 1.e-6) * pq.Hz
        elif version >= 2.:
            sampling_rate = 1.e6 / \
                header['protocol']['fADCSequenceInterval'] * pq.Hz

        # construct block
        # one sweep = one segment in a block
        pos = 0
        for j in range(episode_array.size):
            seg = Segment(index=j)

            length = episode_array[j]['len']

            if version < 2.:
                fSynchTimeUnit = header['fSynchTimeUnit']
            elif version >= 2.:
                fSynchTimeUnit = header['protocol']['fSynchTimeUnit']

            if (fSynchTimeUnit != 0) and (mode == 1):
                # BUGFIX: `length /= fSynchTimeUnit` left a float
                # (fSynchTimeUnit is a float header field) and float
                # slice indices are rejected by modern NumPy below;
                # truncate back to an integer sample count.
                length = int(length / fSynchTimeUnit)

            if not lazy:
                subdata = data[pos:pos + length]
                subdata = subdata.reshape(
                    (int(subdata.size / nbchannel), nbchannel)).astype('f')
                if dt == np.dtype('i2'):
                    if version < 2.:
                        reformat_integer_v1(subdata, nbchannel, header)
                    elif version >= 2.:
                        reformat_integer_v2(subdata, nbchannel, header)

            pos += length

            if version < 2.:
                chans = [chan_num for chan_num in header['nADCSamplingSeq']
                         if chan_num >= 0]
            else:
                chans = range(nbchannel)
            for n, i in enumerate(chans[:nbchannel]):  # fix SamplingSeq
                if version < 2.:
                    name = header['sADCChannelName'][i].replace(b' ', b'')
                    unit = header['sADCUnits'][i].replace(b'\xb5', b'u').\
                        replace(b' ', b'').decode('utf-8')  # \xb5 is µ
                    num = header['nADCPtoLChannelMap'][i]
                elif version >= 2.:
                    lADCIi = header['listADCInfo'][i]
                    name = lADCIi['ADCChNames'].replace(b' ', b'')
                    unit = lADCIi['ADCChUnits'].replace(b'\xb5', b'u').\
                        replace(b' ', b'').decode('utf-8')
                    num = header['listADCInfo'][i]['nADCNum']
                if fSynchTimeUnit == 0:
                    t_start = float(
                        episode_array[j]['offset']) / sampling_rate
                else:
                    t_start = float(episode_array[j]['offset']
                                    ) * fSynchTimeUnit * 1e-6 * pq.s
                t_start = t_start.rescale('s')
                try:
                    pq.Quantity(1, unit)
                except Exception:
                    # unknown unit string -> dimensionless
                    unit = ''

                if lazy:
                    signal = [] * pq.Quantity(1, unit)
                else:
                    signal = pq.Quantity(subdata[:, n], unit)

                anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                                      t_start=t_start,
                                      name=str(name),
                                      channel_index=int(num))
                if lazy:
                    anaSig.lazy_shape = length // nbchannel
                seg.analogsignals.append(anaSig)
            bl.segments.append(seg)

        if mode in [3, 5]:  # TODO check if tags exits in other mode
            # tag is EventArray that should be attached to Block
            # It is attched to the first Segment
            times = []
            labels = []
            comments = []
            for i, tag in enumerate(header['listTag']):
                times.append(tag['lTagTime'] / sampling_rate)
                labels.append(str(tag['nTagType']))
                comments.append(clean_string(tag['sComment']))
            times = np.array(times)
            labels = np.array(labels, dtype='S')
            comments = np.array(comments, dtype='S')
            # attach all tags to the first segment.
            seg = bl.segments[0]
            if lazy:
                ea = Event(times=[] * pq.s, labels=np.array([], dtype='S'))
                ea.lazy_shape = len(times)
            else:
                ea = Event(times=times * pq.s, labels=labels,
                           comments=comments)
            seg.events.append(ea)

    bl.create_many_to_one_relationship()
    return bl
def read_segment(self, import_neuroshare_segment=True, lazy=False, cascade=True):
    """
    Read one Segment from a neuroshare-compatible file via the vendor DLL.

    Iterates over all entities in the file and converts them:
    EVENT -> Event, ANALOG -> AnalogSignal, SEGMENT -> SpikeTrain with
    waveforms, NEURALEVENT -> SpikeTrain without waveforms.

    Arguments:
        import_neuroshare_segment: import neuroshare segment as SpikeTrain
            with associated waveforms or not imported at all.
        lazy: if True, only shapes are recorded (lazy_shape), no data read.
        cascade: if False, return the bare Segment after the API version
            annotation.
    """
    seg = Segment(file_origin=os.path.basename(self.filename), )

    # load the vendor neuroshare DLL for the current platform
    # NOTE(review): on any other platform (e.g. darwin) `neuroshare` would
    # be undefined here — only win/linux are handled.
    if sys.platform.startswith('win'):
        neuroshare = ctypes.windll.LoadLibrary(self.dllname)
    elif sys.platform.startswith('linux'):
        neuroshare = ctypes.cdll.LoadLibrary(self.dllname)
    # wrapper that converts non-zero return codes into Python errors
    neuroshare = DllWithError(neuroshare)

    #elif sys.platform.startswith('darwin'):

    # API version
    info = ns_LIBRARYINFO()
    neuroshare.ns_GetLibraryInfo(ctypes.byref(info), ctypes.sizeof(info))
    seg.annotate(neuroshare_version=str(info.dwAPIVersionMaj) + '.'
                 + str(info.dwAPIVersionMin))

    if not cascade:
        return seg

    # open file
    # NOTE(review): ctypes.c_char_p expects bytes on Python 3 — presumably
    # self.filename is bytes here; confirm with the caller.
    hFile = ctypes.c_uint32(0)
    neuroshare.ns_OpenFile(ctypes.c_char_p(self.filename),
                           ctypes.byref(hFile))
    fileinfo = ns_FILEINFO()
    neuroshare.ns_GetFileInfo(hFile, ctypes.byref(fileinfo),
                              ctypes.sizeof(fileinfo))

    # read all entities
    for dwEntityID in range(fileinfo.dwEntityCount):
        entityInfo = ns_ENTITYINFO()
        neuroshare.ns_GetEntityInfo(hFile, dwEntityID,
                                    ctypes.byref(entityInfo),
                                    ctypes.sizeof(entityInfo))

        # EVENT
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_EVENT':
            pEventInfo = ns_EVENTINFO()
            neuroshare.ns_GetEventInfo(hFile, dwEntityID,
                                       ctypes.byref(pEventInfo),
                                       ctypes.sizeof(pEventInfo))

            # choose the ctypes buffer matching the event payload type
            if pEventInfo.dwEventType == 0:  # TEXT
                pData = ctypes.create_string_buffer(
                    pEventInfo.dwMaxDataLength)
            elif pEventInfo.dwEventType == 1:  # CVS
                pData = ctypes.create_string_buffer(
                    pEventInfo.dwMaxDataLength)
            elif pEventInfo.dwEventType == 2:  # 8bit
                pData = ctypes.c_byte(0)
            elif pEventInfo.dwEventType == 3:  # 16bit
                pData = ctypes.c_int16(0)
            elif pEventInfo.dwEventType == 4:  # 32bit
                pData = ctypes.c_int32(0)
            pdTimeStamp = ctypes.c_double(0.)
            pdwDataRetSize = ctypes.c_uint32(0)

            ea = Event(name=str(entityInfo.szEntityLabel), )
            if not lazy:
                times = []
                labels = []
                # one DLL call per event item; pData is reused each time
                for dwIndex in range(entityInfo.dwItemCount):
                    neuroshare.ns_GetEventData(
                        hFile, dwEntityID, dwIndex,
                        ctypes.byref(pdTimeStamp), ctypes.byref(pData),
                        ctypes.sizeof(pData), ctypes.byref(pdwDataRetSize))
                    times.append(pdTimeStamp.value)
                    labels.append(str(pData.value))
                ea.times = times * pq.s
                ea.labels = np.array(labels, dtype='S')
            else:
                ea.lazy_shape = entityInfo.dwItemCount
            seg.eventarrays.append(ea)

        # analog
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_ANALOG':
            pAnalogInfo = ns_ANALOGINFO()
            neuroshare.ns_GetAnalogInfo(hFile, dwEntityID,
                                        ctypes.byref(pAnalogInfo),
                                        ctypes.sizeof(pAnalogInfo))
            dwIndexCount = entityInfo.dwItemCount

            if lazy:
                signal = [] * pq.Quantity(1, pAnalogInfo.szUnits)
            else:
                pdwContCount = ctypes.c_uint32(0)
                pData = np.zeros((entityInfo.dwItemCount, ), dtype='float64')
                # the DLL may return fewer samples than requested, so keep
                # calling until the whole item count has been read
                total_read = 0
                while total_read < entityInfo.dwItemCount:
                    dwStartIndex = ctypes.c_uint32(total_read)
                    dwStopIndex = ctypes.c_uint32(
                        entityInfo.dwItemCount - total_read)
                    neuroshare.ns_GetAnalogData(
                        hFile, dwEntityID, dwStartIndex, dwStopIndex,
                        ctypes.byref(pdwContCount),
                        pData[total_read:].ctypes.data_as(
                            ctypes.POINTER(ctypes.c_double)))
                    total_read += pdwContCount.value

                signal = pq.Quantity(pData, units=pAnalogInfo.szUnits,
                                     copy=False)

            #t_start
            dwIndex = 0
            pdTime = ctypes.c_double(0)
            neuroshare.ns_GetTimeByIndex(hFile, dwEntityID, dwIndex,
                                         ctypes.byref(pdTime))

            anaSig = AnalogSignal(
                signal,
                sampling_rate=pAnalogInfo.dSampleRate * pq.Hz,
                t_start=pdTime.value * pq.s,
                name=str(entityInfo.szEntityLabel), )
            anaSig.annotate(probe_info=str(pAnalogInfo.szProbeInfo))
            if lazy:
                anaSig.lazy_shape = entityInfo.dwItemCount
            seg.analogsignals.append(anaSig)

        #segment
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_SEGMENT' \
                and import_neuroshare_segment:

            pdwSegmentInfo = ns_SEGMENTINFO()
            # only labels starting with 'spks' are treated as spike data
            if not str(entityInfo.szEntityLabel).startswith('spks'):
                continue

            neuroshare.ns_GetSegmentInfo(hFile, dwEntityID,
                                         ctypes.byref(pdwSegmentInfo),
                                         ctypes.sizeof(pdwSegmentInfo))
            nsource = pdwSegmentInfo.dwSourceCount

            # NOTE(review): on Python 3 create_string_buffer requires bytes;
            # " " * 256 is str — confirm this path still runs, or change to
            # b" " * 256.
            pszMsgBuffer = ctypes.create_string_buffer(" " * 256)
            neuroshare.ns_GetLastErrorMsg(ctypes.byref(pszMsgBuffer), 256)

            for dwSourceID in range(pdwSegmentInfo.dwSourceCount):
                pSourceInfo = ns_SEGSOURCEINFO()
                neuroshare.ns_GetSegmentSourceInfo(
                    hFile, dwEntityID, dwSourceID,
                    ctypes.byref(pSourceInfo),
                    ctypes.sizeof(pSourceInfo))

            if lazy:
                # NOTE(review): `times` is not assigned in this branch — it
                # holds whatever a previous entity left behind, or is
                # undefined on the first lazy segment entity (NameError).
                sptr = SpikeTrain(times, name=str(entityInfo.szEntityLabel),
                                  t_stop=0. * pq.s)
                sptr.lazy_shape = entityInfo.dwItemCount
            else:
                pdTimeStamp = ctypes.c_double(0.)
                dwDataBufferSize = pdwSegmentInfo.dwMaxSampleCount * \
                    pdwSegmentInfo.dwSourceCount
                pData = np.zeros((dwDataBufferSize), dtype='float64')
                pdwSampleCount = ctypes.c_uint32(0)
                pdwUnitID = ctypes.c_uint32(0)

                nsample = int(dwDataBufferSize)
                times = np.empty((entityInfo.dwItemCount), dtype='f')
                waveforms = np.empty((entityInfo.dwItemCount, nsource,
                                      nsample), dtype='f')
                # one DLL call per spike; pData is a reused scratch buffer
                for dwIndex in range(entityInfo.dwItemCount):
                    neuroshare.ns_GetSegmentData(
                        hFile, dwEntityID, dwIndex,
                        ctypes.byref(pdTimeStamp),
                        pData.ctypes.data_as(
                            ctypes.POINTER(ctypes.c_double)),
                        dwDataBufferSize * 8,
                        ctypes.byref(pdwSampleCount),
                        ctypes.byref(pdwUnitID))

                    times[dwIndex] = pdTimeStamp.value
                    waveforms[dwIndex, :, :] = \
                        pData[:nsample * nsource].reshape(
                            nsample, nsource).transpose()

                sptr = SpikeTrain(
                    times=pq.Quantity(times, units='s', copy=False),
                    t_stop=times.max(),
                    waveforms=pq.Quantity(waveforms,
                                          units=str(pdwSegmentInfo.szUnits),
                                          copy=False),
                    left_sweep=nsample / 2.
                    / float(pdwSegmentInfo.dSampleRate) * pq.s,
                    sampling_rate=float(pdwSegmentInfo.dSampleRate) * pq.Hz,
                    name=str(entityInfo.szEntityLabel), )
            seg.spiketrains.append(sptr)

        # neuralevent
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_NEURALEVENT':
            pNeuralInfo = ns_NEURALINFO()
            neuroshare.ns_GetNeuralInfo(hFile, dwEntityID,
                                        ctypes.byref(pNeuralInfo),
                                        ctypes.sizeof(pNeuralInfo))

            if lazy:
                times = [] * pq.s
                t_stop = 0 * pq.s
            else:
                pData = np.zeros((entityInfo.dwItemCount, ), dtype='float64')
                dwStartIndex = 0
                dwIndexCount = entityInfo.dwItemCount
                # read all spike times in one call into pData
                neuroshare.ns_GetNeuralData(
                    hFile, dwEntityID, dwStartIndex, dwIndexCount,
                    pData.ctypes.data_as(ctypes.POINTER(ctypes.c_double)))
                times = pData * pq.s
                t_stop = times.max()
            sptr = SpikeTrain(times, t_stop=t_stop,
                              name=str(entityInfo.szEntityLabel), )
            if lazy:
                sptr.lazy_shape = entityInfo.dwItemCount
            seg.spiketrains.append(sptr)

    # close
    neuroshare.ns_CloseFile(hFile)

    seg.create_many_to_one_relationship()
    return seg
def _read_segment(self, fobject, lazy):
    '''
    Read a single Segment holding a single AnalogSignal from the open
    file object `fobject`.

    Record layout, read strictly in this order:
      float64          start time of the AnalogSignal (in days, see pq.d below)
      int16            index of the stimulus-parameter set
      int16            number of stimulus parameters
      per parameter:   uint8 name length, then that many name bytes
      float32 * n      the stimulus-parameter values
      int32            number of samples in the signal
      int16 * numpts   the signal samples

    Returns the Segment, or False when the file is exhausted.
    NOTE(review): despite the historical docstring saying "None", the code
    returns False at end-of-file; truthiness-based callers are unaffected.
    '''
    try:
        # float64 -- start time of the AnalogSignal
        t_start = np.fromfile(fobject, dtype=np.float64, count=1)[0]
    except IndexError:
        # at end-of-file np.fromfile returns an empty array, so [0] raises:
        # there are no more Segments
        return False

    # int16 -- index of the stimulus parameters
    seg_index = np.fromfile(fobject, dtype=np.int16, count=1)[0].tolist()

    # int16 -- number of stimulus parameters
    numelements = np.fromfile(fobject, dtype=np.int16, count=1)[0]

    # read the name strings for the stimulus parameters
    paramnames = []
    for _ in range(numelements):
        # unit8 -- the number of characters in the string
        numchars = np.fromfile(fobject, dtype=np.uint8, count=1)[0]

        # char * numchars -- a single name string
        name = np.fromfile(fobject, dtype=np.uint8, count=numchars)

        # exclude invalid characters (keep printable bytes, codes >= 32)
        name = str(name[name >= 32].view('c').tostring())

        # add the name to the list of names
        paramnames.append(name)

    # float32 * numelements -- the values for the stimulus parameters
    paramvalues = np.fromfile(fobject, dtype=np.float32, count=numelements)

    # combine parameter names and the parameters as a dict
    params = dict(zip(paramnames, paramvalues))

    # int32 -- the number elements in the AnalogSignal
    numpts = np.fromfile(fobject, dtype=np.int32, count=1)[0]

    # int16 * numpts -- the AnalogSignal itself
    signal = np.fromfile(fobject, dtype=np.int16, count=numpts)

    # handle lazy loading: empty signal but remember the real length
    if lazy:
        sig = AnalogSignal([], t_start=t_start*pq.d,
                           file_origin=self._filename,
                           sampling_period=1.*pq.s,
                           units=pq.mV,
                           dtype=np.float)
        sig.lazy_shape = len(signal)
    else:
        sig = AnalogSignal(signal.astype(np.float)*pq.mV,
                           t_start=t_start*pq.d,
                           file_origin=self._filename,
                           sampling_period=1.*pq.s,
                           copy=False)
    # Note: setting the sampling_period to 1 s is arbitrary

    # load the AnalogSignal and parameters into a new Segment
    seg = Segment(file_origin=self._filename,
                  index=seg_index,
                  **params)
    seg.analogsignals = [sig]

    return seg
def read_segment(self, blockname=None, lazy=False, cascade=True, sortname=''):
    """
    Read a single segment from the tank. Note that TDT blocks are Neo
    segments, and TDT tanks are Neo blocks, so here the 'blockname' argument
    refers to the TDT block's name, which will be the Neo segment name.

    'sortname' is used to specify the external sortcode generated by offline
    spike sorting. If sortname=='PLX', there should be a
    ./sort/PLX/*.SortResult file in the tdt block, which stores the sortcode
    for every spike; defaults to '', which uses the original online sort.
    """
    if not blockname:
        blockname = os.listdir(self.dirname)[0]

    if blockname == 'TempBlk':
        return None

    if not self.is_tdtblock(blockname):
        return None    # if not a tdt block

    subdir = os.path.join(self.dirname, blockname)
    if not os.path.isdir(subdir):
        return None

    seg = Segment(name=blockname)

    tankname = os.path.basename(self.dirname)

    # TSQ is the global index: one fixed-size record per stored event
    tsq_filename = os.path.join(subdir, tankname + '_' + blockname + '.tsq')
    dt = [('size', 'int32'),
          ('evtype', 'int32'),
          ('code', 'S4'),
          ('channel', 'uint16'),
          ('sortcode', 'uint16'),
          ('timestamp', 'float64'),
          ('eventoffset', 'int64'),
          ('dataformat', 'int32'),
          ('frequency', 'float32'),
          ]
    tsq = np.fromfile(tsq_filename, dtype=dt)

    # 0x8801: 'EVTYPE_MARK' gives the global start time of the block
    global_t_start = tsq[tsq['evtype'] == 0x8801]['timestamp'][0]

    # TEV is the old-style data file holding the actual sample chunks
    try:
        tev_filename = os.path.join(subdir, tankname + '_' + blockname + '.tev')
        #tev_array = np.memmap(tev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
        tev_array = np.fromfile(tev_filename, dtype='uint8')
    except IOError:
        tev_filename = None

    # if there exists an external sortcode in ./sort/[sortname]/*.SortResult
    # (generated after offline sorting), use it to overwrite the online codes
    sortresult_filename = None
    # NOTE(review): `is not ''` is an identity comparison with a literal
    # (SyntaxWarning on modern Python); `!=` is what is meant. Left as-is
    # in this documentation-only pass.
    if sortname is not '':
        try:
            for file in os.listdir(os.path.join(subdir, 'sort', sortname)):
                if file.endswith(".SortResult"):
                    sortresult_filename = os.path.join(subdir, 'sort',
                                                       sortname, file)

                    # get new sortcode
                    newsorcode = np.fromfile(sortresult_filename, 'int8')[1024:]  # the first 1024 byte is file header
                    # update the sort code with the info from this file
                    # (first and last tsq records are block markers, hence 1:-1)
                    tsq['sortcode'][1:-1] = newsorcode
                    # print('sortcode updated')
                    break
        except OSError:
            sortresult_filename = None
        except IOError:
            sortresult_filename = None

    # walk every (event type, code, channel) combination present in the index
    for type_code, type_label in tdt_event_type:
        mask1 = tsq['evtype'] == type_code
        codes = np.unique(tsq[mask1]['code'])

        for code in codes:
            mask2 = mask1 & (tsq['code'] == code)
            channels = np.unique(tsq[mask2]['channel'])

            for channel in channels:
                mask3 = mask2 & (tsq['channel'] == channel)

                if type_label in ['EVTYPE_STRON', 'EVTYPE_STROFF']:
                    # strobe on/off -> Neo Event; the strobe value is stored
                    # in the eventoffset field, reinterpreted as float64
                    if lazy:
                        times = [] * pq.s
                        labels = np.array([], dtype=str)
                    else:
                        times = (tsq[mask3]['timestamp'] - global_t_start) * pq.s
                        labels = tsq[mask3]['eventoffset'].view('float64').astype('S')
                    ea = Event(times=times,
                               name=code,
                               channel_index=int(channel),
                               labels=labels)
                    if lazy:
                        ea.lazy_shape = np.sum(mask3)
                    seg.events.append(ea)

                elif type_label == 'EVTYPE_SNIP':
                    # spike snippets -> one SpikeTrain per sortcode
                    sortcodes = np.unique(tsq[mask3]['sortcode'])
                    for sortcode in sortcodes:
                        mask4 = mask3 & (tsq['sortcode'] == sortcode)
                        nb_spike = np.sum(mask4)
                        sr = tsq[mask4]['frequency'][0]
                        # 'size' counts 32-bit words including a 10-word header
                        waveformsize = tsq[mask4]['size'][0] - 10
                        if lazy:
                            times = [] * pq.s
                            waveforms = None
                        else:
                            times = (tsq[mask4]['timestamp'] - global_t_start) * pq.s
                            dt = np.dtype(data_formats[tsq[mask3]['dataformat'][0]])
                            waveforms = get_chunks(tsq[mask4]['size'],
                                                   tsq[mask4]['eventoffset'],
                                                   tev_array).view(dt)
                            waveforms = waveforms.reshape(nb_spike, -1,
                                                          waveformsize)
                            waveforms = waveforms * pq.mV
                        if nb_spike > 0:
                            # t_start = (tsq['timestamp'][0] - global_t_start) * pq.s # this hould work but not
                            t_start = 0 * pq.s
                            t_stop = (tsq['timestamp'][-1] - global_t_start) * pq.s
                        else:
                            t_start = 0 * pq.s
                            t_stop = 0 * pq.s
                        st = SpikeTrain(times=times,
                                        name='Chan{0} Code{1}'.format(channel, sortcode),
                                        t_start=t_start,
                                        t_stop=t_stop,
                                        waveforms=waveforms,
                                        left_sweep=waveformsize / 2. / sr * pq.s,
                                        sampling_rate=sr * pq.Hz,
                                        )
                        st.annotate(channel_index=channel)
                        if lazy:
                            st.lazy_shape = nb_spike
                        seg.spiketrains.append(st)

                elif type_label == 'EVTYPE_STREAM':
                    # continuous stream -> AnalogSignal, assembled from chunks
                    dt = np.dtype(data_formats[tsq[mask3]['dataformat'][0]])
                    shape = np.sum(tsq[mask3]['size'] - 10)
                    sr = tsq[mask3]['frequency'][0]
                    if lazy:
                        signal = []
                    else:
                        if PY3K:
                            signame = code.decode('ascii')
                        else:
                            signame = code
                        # newer tanks keep each stream in its own .sev file;
                        # fall back to the shared .tev when it is missing
                        sev_filename = os.path.join(subdir, tankname + '_'
                                                    + blockname + '_' + signame
                                                    + '_ch' + str(channel)
                                                    + '.sev')
                        try:
                            #sig_array = np.memmap(sev_filename, mode = 'r', dtype = 'uint8') # if memory problem use this instead
                            sig_array = np.fromfile(sev_filename, dtype='uint8')
                        except IOError:
                            sig_array = tev_array
                        signal = get_chunks(tsq[mask3]['size'],
                                            tsq[mask3]['eventoffset'],
                                            sig_array).view(dt)

                    anasig = AnalogSignal(
                        signal=signal * pq.V,
                        name='{0} {1}'.format(code, channel),
                        sampling_rate=sr * pq.Hz,
                        t_start=(tsq[mask3]['timestamp'][0] - global_t_start) * pq.s,
                        channel_index=int(channel))
                    if lazy:
                        anasig.lazy_shape = shape
                    seg.analogsignals.append(anasig)
    return seg
def read_segment(self,
                 lazy=False,
                 cascade=True,
                 delimiter='\t',
                 usecols=None,
                 skiprows=0,
                 timecolumn=None,
                 sampling_rate=1. * pq.Hz,
                 t_start=0. * pq.s,
                 unit=pq.V,
                 method='genfromtxt',
                 ):
    """
    Read a text file of columns of samples into a Segment with one
    AnalogSignal per column.

    Arguments:
        delimiter  :  columns delimiter in file  '\t' or one space
                      or two space or ',' or ';'
        usecols    :  if None take all columns otherwise a list for
                      selected columns
        skiprows   :  skip n first lines in case they contain header
                      informations
        timecolumn :  None or a valid int that points to the time vector
        sampling_rate : the samplerate of signals; if timecolumn is not
                      None this is not taken into account
        t_start    :  time of the first sample
        unit       :  unit of AnalogSignal; can be a str or directly
                      a Quantity
        method     :  'genfromtxt' or 'csv' or 'homemade'
                      in case of bugs you can try one of these methods:
                      'genfromtxt' use numpy.genfromtxt
                      'csv'        use csv module
                      'homemade'   use an intuitive, more robust but
                                   slow method
    """
    seg = Segment(file_origin=os.path.basename(self.filename))
    if not cascade:
        return seg

    # normalise plain numbers into Quantities
    if type(sampling_rate) == float or type(sampling_rate) == int:
        # if not a quantity, Hz by default
        sampling_rate = sampling_rate * pq.Hz
    if type(t_start) == float or type(t_start) == int:
        # if not a quantity, s by default
        t_start = t_start * pq.s
    unit = pq.Quantity(1, unit)

    # --- load the raw table as a 2-D float array, one of three ways ---
    if method == 'genfromtxt':
        # FIX: numpy removed the deprecated `skiprows` kwarg; `skip_header`
        # is the long-standing equivalent
        sig = np.genfromtxt(self.filename,
                            delimiter=delimiter,
                            usecols=usecols,
                            skip_header=skiprows,
                            dtype='f')
        if len(sig.shape) == 1:
            # a single column comes back 1-D; make it (n, 1)
            sig = sig[:, np.newaxis]
    elif method == 'csv':
        # FIX: the Python 2 `file()` builtin does not exist on Python 3;
        # use open(), and close the handle deterministically
        with open(self.filename, 'rU') as csv_file:
            tab = [l for l in csv.reader(csv_file, delimiter=delimiter)]
        tab = tab[skiprows:]
        sig = np.array(tab, dtype='f')
    elif method == 'homemade':
        with open(self.filename, 'rU') as fid:
            for l in range(skiprows):
                fid.readline()
            tab = []
            for line in fid.readlines():
                line = line.replace('\r', '')
                line = line.replace('\n', '')
                l = line.split(delimiter)
                # drop empty fields produced by repeated delimiters
                while '' in l:
                    l.remove('')
                tab.append(l)
        sig = np.array(tab, dtype='f')

    # when a time column is present, derive rate and start time from it
    # (this overrides the sampling_rate / t_start arguments)
    if timecolumn is not None:
        sampling_rate = 1. / np.mean(np.diff(sig[:, timecolumn])) * pq.Hz
        t_start = sig[0, timecolumn] * pq.s

    for i in range(sig.shape[1]):
        if timecolumn == i:
            continue
        if usecols is not None and i not in usecols:
            continue

        if lazy:
            signal = [] * unit
        else:
            signal = sig[:, i] * unit

        anaSig = AnalogSignal(signal,
                              sampling_rate=sampling_rate,
                              t_start=t_start,
                              channel_index=i,
                              name='Column %d' % i)
        if lazy:
            anaSig.lazy_shape = sig.shape
        seg.analogsignals.append(anaSig)

    # consistency: every other reader in this module uses the method form
    seg.create_many_to_one_relationship()
    return seg
def read_analogsignal( self, # the 2 first key arguments are imposed by neo.io lazy=False, cascade=True, #channel index as given by the neuroshare API channel_index=0, #time in seconds to be read segment_duration=0., #time in seconds to start reading from t_start=0., ): #some controls: #if no segment duration is given, use the complete file if segment_duration == 0.: segment_duration = float(self.metadata["TimeSpan"]) #if the segment duration is bigger than file, use the complete file if segment_duration >= float(self.metadata["TimeSpan"]): segment_duration = float(self.metadata["TimeSpan"]) if lazy: anasig = AnalogSignal( [], units="V", sampling_rate=self.metadata["sampRate"] * pq.Hz, t_start=t_start * pq.s, ) #create a dummie time vector tvect = np.arange(t_start, t_start + segment_duration, 1. / self.metadata["sampRate"]) # we add the attribute lazy_shape with the size if loaded anasig.lazy_shape = tvect.shape else: #get the analog object sig = self.fd.get_entity(channel_index) #get the units (V, mV etc) sigUnits = sig.units #get the electrode number chanName = sig.label[-4:] #transform t_start into index (reading will start from this index) startat = int(t_start * self.metadata["sampRate"]) #get the number of bins to read in bins = int(segment_duration * self.metadata["sampRate"]) #if the number of bins to read is bigger than #the total number of bins, read only till the end of analog object if startat + bins > sig.item_count: bins = sig.item_count - startat #read the data from the sig object sig, _, _ = sig.get_data(index=startat, count=bins) #store it to the 'AnalogSignal' object anasig = AnalogSignal(sig, units=sigUnits, sampling_rate=self.metadata["sampRate"] * pq.Hz, t_start=t_start * pq.s, t_stop=(t_start + segment_duration) * pq.s, channel_index=channel_index) # annotate from which electrode the signal comes from anasig.annotate(info="signal from channel %s" % chanName) return anasig
def read_segment(self, cascade=True, lazy=False, ):
    """
    Read a Micromed .TRC file (header version 4) into one Segment:
    one AnalogSignal per channel plus trigger/note Events and
    "Event A"/"Event B" Epochs. All locations below are fixed byte
    offsets defined by the TRC format.
    """
    f = StructFile(open(self.filename, 'rb'))

    # Name: fixed-width, space-padded fields at offset 64
    f.seek(64, 0)
    surname = f.read(22).decode('ascii')
    while surname[-1] == ' ':
        if len(surname) == 0:
            break
        surname = surname[:-1]
    firstname = f.read(20).decode('ascii')
    while firstname[-1] == ' ':
        if len(firstname) == 0:
            break
        firstname = firstname[:-1]

    # Date: six signed bytes at offset 128; year is stored relative to 1900
    f.seek(128, 0)
    day, month, year, hour, minute, sec = f.read_f('bbbbbb')
    rec_datetime = datetime.datetime(year + 1900, month, day, hour,
                                     minute, sec)

    f.seek(138, 0)
    Data_Start_Offset, Num_Chan, Multiplexer, Rate_Min, Bytes = f.read_f(
        'IHHHH')
    #~ print Num_Chan, Bytes

    # header version: only version 4 is supported by this reader
    f.seek(175, 0)
    header_version, = f.read_f('b')
    assert header_version == 4

    seg = Segment(name=str(firstname + ' ' + surname),
                  file_origin=os.path.basename(self.filename))
    seg.annotate(surname=surname)
    seg.annotate(firstname=firstname)
    seg.annotate(rec_datetime=rec_datetime)

    if not cascade:
        f.close()
        return seg

    # area: the zone table at offset 176 maps each named zone to a
    # (name, file position, length) triple used below
    f.seek(176, 0)
    zone_names = ['ORDER', 'LABCOD', 'NOTE', 'FLAGS', 'TRONCA', 'IMPED_B',
                  'IMPED_E', 'MONTAGE', 'COMPRESS', 'AVERAGE', 'HISTORY',
                  'DVIDEO', 'EVENT A', 'EVENT B', 'TRIGGER']
    zones = {}
    for zname in zone_names:
        zname2, pos, length = f.read_f('8sII')
        zones[zname] = zname2, pos, length
        #~ print zname2, pos, length

    # reading raw data: multiplexed samples, `Bytes` bytes each, unsigned
    if not lazy:
        f.seek(Data_Start_Offset, 0)
        rawdata = np.fromstring(f.read(), dtype='u' + str(Bytes))
        rawdata = rawdata.reshape((-1, Num_Chan))

    # Reading Code Info: per-channel index into the LABCOD zone
    zname2, pos, length = zones['ORDER']
    f.seek(pos, 0)
    code = np.fromstring(f.read(Num_Chan*2), dtype='u2', count=Num_Chan)

    # unit code -> quantity; unknown codes fall back to uV below
    units = {-1: pq.nano * pq.V, 0: pq.uV, 1: pq.mV, 2: 1, 100: pq.percent,
             101: pq.dimensionless, 102: pq.dimensionless}

    for c in range(Num_Chan):
        # each LABCOD record is 128 bytes; +2 skips a leading short
        zname2, pos, length = zones['LABCOD']
        f.seek(pos + code[c] * 128 + 2, 0)

        label = f.read(6).strip(b"\x00").decode('ascii')
        ground = f.read(6).strip(b"\x00").decode('ascii')
        (logical_min, logical_max, logical_ground, physical_min,
         physical_max) = f.read_f('iiiii')
        k, = f.read_f('h')
        if k in units.keys():
            unit = units[k]
        else:
            unit = pq.uV

        # skip 8 bytes, then the per-channel rate multiplier (of Rate_Min)
        f.seek(8, 1)
        sampling_rate, = f.read_f('H') * pq.Hz
        sampling_rate *= Rate_Min

        if lazy:
            signal = [] * unit
        else:
            # linear ADC-count -> physical-value conversion
            factor = float(physical_max - physical_min) / float(
                logical_max - logical_min + 1)
            signal = (rawdata[:, c].astype(
                'f') - logical_ground) * factor * unit

        ana_sig = AnalogSignal(signal, sampling_rate=sampling_rate,
                               name=str(label), channel_index=c)
        if lazy:
            ana_sig.lazy_shape = None
        ana_sig.annotate(ground=ground)
        seg.analogsignals.append(ana_sig)

    # single rate used to convert event/epoch sample positions to seconds
    sampling_rate = np.mean(
        [ana_sig.sampling_rate for ana_sig in seg.analogsignals]) * pq.Hz

    # Read trigger and notes
    for zname, label_dtype in [('TRIGGER', 'u2'), ('NOTE', 'S40')]:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        triggers = np.fromstring(f.read(length), dtype=[('pos', 'u4'), (
            'label', label_dtype)])
        if not lazy:
            # keep only in-range, non-zero, non-decreasing positions
            keep = (triggers['pos'] >= triggers['pos'][0]) & (
                triggers['pos'] < rawdata.shape[0]) & (
                triggers['pos'] != 0)
            triggers = triggers[keep]
            ea = Event(name=zname[0] + zname[1:].lower(),
                       labels=triggers['label'].astype('S'),
                       times=(triggers['pos'] / sampling_rate).rescale('s'))
        else:
            ea = Event(name=zname[0] + zname[1:].lower())
            ea.lazy_shape = triggers.size
        seg.events.append(ea)

    # Read Event A and B
    # Not so well tested
    for zname in ['EVENT A', 'EVENT B']:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        epochs = np.fromstring(f.read(length),
                               dtype=[('label', 'u4'), ('start', 'u4'),
                                      ('stop', 'u4'), ])
        ep = Epoch(name=zname[0] + zname[1:].lower())
        if not lazy:
            keep = (epochs['start'] > 0) & (
                epochs['start'] < rawdata.shape[0]) & (
                epochs['stop'] < rawdata.shape[0])
            epochs = epochs[keep]
            ep = Epoch(name=zname[0] + zname[1:].lower(),
                       labels=epochs['label'].astype('S'),
                       times=(epochs['start'] / sampling_rate).rescale('s'),
                       durations=((epochs['stop'] - epochs['start'])
                                  / sampling_rate).rescale('s'))
        else:
            ep = Epoch(name=zname[0] + zname[1:].lower())
            # NOTE(review): this reuses `triggers` from the previous loop;
            # `epochs.size` looks intended -- confirm before changing
            ep.lazy_shape = triggers.size
        seg.epochs.append(ep)

    seg.create_many_to_one_relationship()
    f.close()
    return seg
def read_segment(self, lazy = False, cascade = True):
    """
    Read a BrainVision recording: the .vhdr header selects the binary
    .eeg data file and the .vmrk marker file (same basename). Only
    BINARY / MULTIPLEXED data in INT_16 or IEEE_FLOAT_32 is supported.
    """
    ## Read header file (vhdr)
    header = readBrainSoup(self.filename)

    assert header['Common Infos']['DataFormat'] == 'BINARY', NotImplementedError
    assert header['Common Infos']['DataOrientation'] == 'MULTIPLEXED', NotImplementedError
    nb_channel = int(header['Common Infos']['NumberOfChannels'])
    # SamplingInterval is in microseconds
    sampling_rate = 1.e6/float(header['Common Infos']['SamplingInterval']) * pq.Hz

    fmt = header['Binary Infos']['BinaryFormat']
    fmts = { 'INT_16':np.int16, 'IEEE_FLOAT_32':np.float32,}
    assert fmt in fmts, NotImplementedError
    dt = fmts[fmt]

    seg = Segment(file_origin = os.path.basename(self.filename), )
    if not cascade : return seg

    # read binary: samples are multiplexed, so reshape to (n, channels)
    if not lazy:
        binary_file = os.path.splitext(self.filename)[0]+'.eeg'
        sigs = np.memmap(binary_file , dt, 'r', ).astype('f')

        n = int(sigs.size/nb_channel)
        # drop a possibly incomplete trailing frame
        sigs = sigs[:n*nb_channel]
        sigs = sigs.reshape(n, nb_channel)

    for c in range(nb_channel):
        # 'ChN' entries are comma-separated: name, reference, resolution, unit
        name, ref, res, units = header['Channel Infos']['Ch%d' % (c+1,)].split(',')
        # quantities does not accept the micro sign, so map it to 'u'
        units = pq.Quantity(1, units.replace('µ', 'u') )
        if lazy:
            signal = [ ]*units
        else:
            signal = sigs[:,c]*units
        anasig = AnalogSignal(signal = signal,
                              channel_index = c,
                              name = name,
                              sampling_rate = sampling_rate,
                              )
        if lazy:
            anasig.lazy_shape = -1
        seg.analogsignals.append(anasig)

    # read marker file (.vmrk); each 'MkN' entry is comma-separated
    marker_file = os.path.splitext(self.filename)[0]+'.vmrk'
    all_info = readBrainSoup(marker_file)['Marker Infos']
    all_types = [ ]
    times = [ ]
    labels = [ ]
    for i in range(len(all_info)):
        type_, label, pos, size, channel = all_info['Mk%d' % (i+1,)].split(',')[:5]
        all_types.append(type_)
        # pos is a sample index; convert to seconds
        times.append(float(pos)/sampling_rate.magnitude)
        labels.append(label)
    all_types = np.array(all_types)
    times = np.array(times) * pq.s
    labels = np.array(labels, dtype = 'S')
    # one EventArray per marker type
    for type_ in np.unique(all_types):
        ind = type_ == all_types
        if lazy:
            ea = EventArray(name = str(type_))
            ea.lazy_shape = -1
        else:
            ea = EventArray( times = times[ind],
                             labels = labels[ind],
                             name = str(type_),
                             )
        seg.eventarrays.append(ea)

    seg.create_many_to_one_relationship()
    return seg
def read_block( self, lazy=False, cascade=True, ): bl = Block(file_origin=os.path.basename(self.filename), ) if not cascade: return bl fid = open(self.filename, 'rb') headertext = fid.read(1024) if PY3K: headertext = headertext.decode('ascii') header = {} for line in headertext.split('\r\n'): if '=' not in line: continue #print '#' , line , '#' key, val = line.split('=') if key in [ 'NC', 'NR', 'NBH', 'NBA', 'NBD', 'ADCMAX', 'NP', 'NZ', ]: val = int(val) elif key in [ 'AD', 'DT', ]: val = val.replace(',', '.') val = float(val) header[key] = val #print header SECTORSIZE = 512 # loop for record number for i in range(header['NR']): #print 'record ',i offset = 1024 + i * (SECTORSIZE * header['NBD'] + 1024) # read analysis zone analysisHeader = HeaderReader( fid, AnalysisDescription).read_f(offset=offset) #print analysisHeader # read data NP = (SECTORSIZE * header['NBD']) / 2 NP = NP - NP % header['NC'] NP = int(NP // header['NC']) if not lazy: data = np.memmap(self.filename, np.dtype('int16'), mode='r', shape=( NP, header['NC'], ), offset=offset + header['NBA'] * SECTORSIZE) # create a segment seg = Segment() bl.segments.append(seg) for c in range(header['NC']): unit = header['YU%d' % c] try: unit = pq.Quantity(1., unit) except: unit = pq.Quantity(1., '') if lazy: signal = [] * unit else: YG = float(header['YG%d' % c].replace(',', '.')) ADCMAX = header['ADCMAX'] VMax = analysisHeader['VMax'][c] chan = int(header['YO%d' % c]) signal = data[:, chan].astype( 'f4') * VMax / ADCMAX / YG * unit anaSig = AnalogSignal( signal, sampling_rate=pq.Hz / analysisHeader['SamplingInterval'], t_start=analysisHeader['TimeRecorded'] * pq.s, name=header['YN%d' % c], channel_index=c) if lazy: anaSig.lazy_shape = NP seg.analogsignals.append(anaSig) fid.close() bl.create_many_to_one_relationship() return bl
def read_segment(self, lazy=False, cascade=True):
    """
    Read a NeuroExplorer (.nex) file into one Segment.

    The 544-byte global header is followed by ``nvar`` 208-byte entity
    headers; each entity's data lives at its own file offset and is
    accessed through read-only memmaps. Entity types handled: 0 neuron,
    1 event, 2 interval, 3 spiketrain+waveforms, 4 popvectors (skipped),
    5 analog, 6 markers.
    """
    fid = open(self.filename, 'rb')
    global_header = HeaderReader(fid, GlobalHeader).read_f(offset=0)
    # ~ print globalHeader
    #~ print 'version' , globalHeader['version']
    seg = Segment()
    seg.file_origin = os.path.basename(self.filename)
    seg.annotate(neuroexplorer_version=global_header['version'])
    seg.annotate(comment=global_header['comment'])
    if not cascade:
        return seg

    offset = 544
    for i in range(global_header['nvar']):
        entity_header = HeaderReader(fid, EntityHeader).read_f(
            offset=offset + i * 208)
        # strip NUL padding from the fixed-width name field
        entity_header['name'] = entity_header['name'].replace('\x00', '')

        if entity_header['type'] == 0:  # neuron
            if lazy:
                spike_times = [] * pq.s
            else:
                # raw int32 tick counts; divide by 'freq' to get seconds
                spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entity_header['n']),
                                        offset=entity_header['offset'])
                spike_times = spike_times.astype('f8') / global_header[
                    'freq'] * pq.s
            sptr = SpikeTrain(
                times=spike_times,
                t_start=global_header['tbeg'] /
                global_header['freq'] * pq.s,
                t_stop=global_header['tend'] /
                global_header['freq'] * pq.s,
                name=entity_header['name'])
            if lazy:
                sptr.lazy_shape = entity_header['n']
            sptr.annotate(channel_index=entity_header['WireNumber'])
            seg.spiketrains.append(sptr)

        if entity_header['type'] == 1:  # event
            if lazy:
                event_times = [] * pq.s
            else:
                event_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entity_header['n']),
                                        offset=entity_header['offset'])
                event_times = event_times.astype('f8') / global_header[
                    'freq'] * pq.s
            labels = np.array([''] * event_times.size, dtype='S')
            evar = Event(times=event_times, labels=labels,
                         channel_name=entity_header['name'])
            if lazy:
                evar.lazy_shape = entity_header['n']
            seg.events.append(evar)

        if entity_header['type'] == 2:  # interval
            if lazy:
                start_times = [] * pq.s
                stop_times = [] * pq.s
            else:
                # starts and stops are stored as two consecutive i4 arrays
                start_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entity_header['n']),
                                        offset=entity_header['offset'])
                start_times = start_times.astype('f8') / global_header[
                    'freq'] * pq.s
                stop_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                       shape=(entity_header['n']),
                                       offset=entity_header['offset'] +
                                       entity_header['n'] * 4)
                stop_times = stop_times.astype('f') / global_header[
                    'freq'] * pq.s
            epar = Epoch(times=start_times,
                         durations=stop_times - start_times,
                         labels=np.array([''] * start_times.size,
                                         dtype='S'),
                         channel_name=entity_header['name'])
            if lazy:
                epar.lazy_shape = entity_header['n']
            seg.epochs.append(epar)

        if entity_header['type'] == 3:  # spiketrain and wavefoms
            if lazy:
                spike_times = [] * pq.s
                waveforms = None
            else:
                spike_times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entity_header['n']),
                                        offset=entity_header['offset'])
                spike_times = spike_times.astype('f8') / global_header[
                    'freq'] * pq.s

                # waveforms follow the timestamps; scale i2 samples to mV
                waveforms = np.memmap(self.filename, np.dtype('i2'), 'r',
                                      shape=(entity_header['n'], 1,
                                             entity_header['NPointsWave']),
                                      offset=entity_header['offset'] +
                                      entity_header['n'] * 4)
                waveforms = (waveforms.astype('f') *
                             entity_header['ADtoMV'] +
                             entity_header['MVOffset']) * pq.mV
            t_stop = global_header['tend'] / global_header['freq'] * pq.s
            if spike_times.size > 0:
                # guard: a spike may fall after the nominal end time
                t_stop = max(t_stop, max(spike_times))
            sptr = SpikeTrain(
                times=spike_times,
                t_start=global_header['tbeg'] /
                global_header['freq'] * pq.s,
                #~ t_stop = max(globalHeader['tend']/
                #~ globalHeader['freq']*pq.s,max(spike_times)),
                t_stop=t_stop,
                name=entity_header['name'],
                waveforms=waveforms,
                sampling_rate=entity_header['WFrequency'] * pq.Hz,
                left_sweep=0 * pq.ms)
            if lazy:
                sptr.lazy_shape = entity_header['n']
            sptr.annotate(channel_index=entity_header['WireNumber'])
            seg.spiketrains.append(sptr)

        if entity_header['type'] == 4:
            # popvectors
            pass

        if entity_header['type'] == 5:  # analog
            # note: these two memmaps are read even in lazy mode, to
            # compute t_start
            timestamps = np.memmap(self.filename, np.dtype('i4'), 'r',
                                   shape=(entity_header['n']),
                                   offset=entity_header['offset'])
            timestamps = timestamps.astype('f8') / global_header['freq']
            # NOTE(review): fragment_starts maps the SAME offset as
            # timestamps -- verify it isn't meant to follow the timestamp
            # block (offset + n*4) as the other paired arrays above do
            fragment_starts = np.memmap(self.filename, np.dtype('i4'), 'r',
                                        shape=(entity_header['n']),
                                        offset=entity_header['offset'])
            fragment_starts = fragment_starts.astype('f8') / global_header[
                'freq']
            t_start = timestamps[0] - fragment_starts[0] / float(
                entity_header['WFrequency'])
            del timestamps, fragment_starts

            if lazy:
                signal = [] * pq.mV
            else:
                signal = np.memmap(self.filename, np.dtype('i2'), 'r',
                                   shape=(entity_header['NPointsWave']),
                                   offset=entity_header['offset'])
                signal = signal.astype('f')
                signal *= entity_header['ADtoMV']
                signal += entity_header['MVOffset']
                signal = signal * pq.mV

            ana_sig = AnalogSignal(
                signal=signal, t_start=t_start * pq.s,
                sampling_rate=entity_header['WFrequency'] * pq.Hz,
                name=entity_header['name'],
                channel_index=entity_header['WireNumber'])
            if lazy:
                ana_sig.lazy_shape = entity_header['NPointsWave']
            seg.analogsignals.append(ana_sig)

        if entity_header['type'] == 6:  # markers  : TO TEST
            if lazy:
                times = [] * pq.s
                labels = np.array([], dtype='S')
                markertype = None
            else:
                times = np.memmap(self.filename, np.dtype('i4'), 'r',
                                  shape=(entity_header['n']),
                                  offset=entity_header['offset'])
                times = times.astype('f8') / global_header['freq'] * pq.s
                fid.seek(entity_header['offset'] + entity_header['n'] * 4)
                # NOTE(review): on Python 3 fid.read returns bytes, so
                # .replace('\x00', ...) with str args would raise -- this
                # path is likely Python-2 era; confirm before use
                markertype = fid.read(64).replace('\x00', '')
                labels = np.memmap(
                    self.filename,
                    np.dtype('S' + str(entity_header['MarkerLength'])),
                    'r', shape=(entity_header['n']),
                    offset=entity_header['offset'] +
                    entity_header['n'] * 4 + 64)
            ea = Event(times=times,
                       labels=labels.view(np.ndarray),
                       name=entity_header['name'],
                       channel_index=entity_header['WireNumber'],
                       marker_type=markertype)
            if lazy:
                ea.lazy_shape = entity_header['n']
            seg.events.append(ea)

    seg.create_many_to_one_relationship()
    return seg
def readOneChannelContinuous(self, fid, channel_num, header,
                             take_ideal_sampling_rate, lazy=True):
    """
    Read one continuous (ADC or float) Spike2 channel as a list of
    AnalogSignals -- one per run of contiguous data blocks, since CED
    files do not guarantee gap-free recording.

    Two passes over the on-disk block chain: pass 1 sizes each
    contiguous run and records its start time; pass 2 fills the
    preallocated signals (skipped entirely when lazy).
    """
    # read AnalogSignal
    channelHeader = header.channelHeaders[channel_num]

    # data type: kind 1 = int16 ADC, kind 9 = float32
    # NOTE(review): other kinds leave `dt` unbound and would raise later
    if channelHeader.kind == 1:
        dt = np.dtype('i2')
    elif channelHeader.kind == 9:
        dt = np.dtype('f4')

    # sample rate
    if take_ideal_sampling_rate:
        sampling_rate = channelHeader.ideal_rate * pq.Hz
    else:
        if header.system_id in [1, 2, 3, 4, 5]:  # Before version 5
            #~ print channel_num, channelHeader.divide, header.us_per_time, header.time_per_adc
            sample_interval = (channelHeader.divide * header.us_per_time *
                               header.time_per_adc) * 1e-6
        else:
            sample_interval = (channelHeader.l_chan_dvd *
                               header.us_per_time * header.dtime_base)
        sampling_rate = (1. / sample_interval) * pq.Hz

    # read blocks header to preallocate memory by jumping block to block
    if channelHeader.blocks == 0:
        return []
    fid.seek(channelHeader.firstblock)
    blocksize = [0]      # samples per contiguous run
    starttimes = []      # start tick of each run
    for b in range(channelHeader.blocks):
        blockHeader = HeaderReader(fid, np.dtype(blockHeaderDesciption))
        if len(blocksize) > len(starttimes):
            # first block of a new run: remember its start time
            starttimes.append(blockHeader.start_time)
        blocksize[-1] += blockHeader.items

        if blockHeader.succ_block > 0:
            # this is ugly but CED do not garanty continuity in AnalogSignal
            # peek at the next block: if the gap to it exceeds one sample
            # interval, close the current run and start a new one
            fid.seek(blockHeader.succ_block)
            nextBlockHeader = HeaderReader(fid,
                                           np.dtype(blockHeaderDesciption))
            sample_interval = (blockHeader.end_time -
                               blockHeader.start_time) / (
                blockHeader.items - 1)
            interval_with_next = nextBlockHeader.start_time - \
                blockHeader.end_time
            if interval_with_next > sample_interval:
                blocksize.append(0)
            fid.seek(blockHeader.succ_block)

    # resolve the channel's unit, falling back to dimensionless
    anaSigs = []
    if channelHeader.unit in unit_convert:
        unit = pq.Quantity(1, unit_convert[channelHeader.unit])
    else:
        #print channelHeader.unit
        try:
            unit = pq.Quantity(1, channelHeader.unit)
        except:
            unit = pq.Quantity(1, '')

    # preallocate one AnalogSignal per contiguous run
    for b, bs in enumerate(blocksize):
        if lazy:
            signal = [] * unit
        else:
            signal = pq.Quantity(np.empty(bs, dtype='f4'), units=unit)
        anaSig = AnalogSignal(
            signal,
            sampling_rate=sampling_rate,
            t_start=(starttimes[b] * header.us_per_time *
                     header.dtime_base * pq.s),
            channel_index=channel_num)
        anaSigs.append(anaSig)

    if lazy:
        for s, anaSig in enumerate(anaSigs):
            anaSig.lazy_shape = blocksize[s]
    else:
        # read data by jumping block to block
        fid.seek(channelHeader.firstblock)
        pos = 0
        numblock = 0
        for b in range(channelHeader.blocks):
            blockHeader = HeaderReader(fid,
                                       np.dtype(blockHeaderDesciption))
            # read data
            sig = np.fromstring(
                fid.read(blockHeader.items * dt.itemsize), dtype=dt)
            anaSigs[numblock][pos:pos + sig.size] = \
                sig.astype('f4') * unit
            pos += sig.size
            if pos >= blocksize[numblock]:
                # current run filled: move on to the next AnalogSignal
                numblock += 1
                pos = 0
            # jump to next block
            if blockHeader.succ_block > 0:
                fid.seek(blockHeader.succ_block)

        # convert for int16: apply the CED scale/offset calibration
        if dt.kind == 'i':
            for anaSig in anaSigs:
                anaSig *= channelHeader.scale / 6553.6
                anaSig += channelHeader.offset * unit

    return anaSigs
def read_segment( self, cascade=True, lazy=False, sampling_rate=1.0 * pq.Hz, t_start=0.0 * pq.s, unit=pq.V, nbchannel=1, bytesoffset=0, dtype="f4", rangemin=-10, rangemax=10, ): """ Reading signal in a raw binary interleaved compact file. Arguments: sampling_rate : sample rate t_start : time of the first sample sample of each channel unit: unit of AnalogSignal can be a str or directly a Quantities nbchannel : number of channel bytesoffset : nb of bytes offset at the start of file dtype : dtype of the data rangemin , rangemax : if the dtype is integer, range can give in volt the min and the max of the range """ seg = Segment(file_origin=os.path.basename(self.filename)) if not cascade: return seg dtype = np.dtype(dtype) if type(sampling_rate) == float or type(sampling_rate) == int: # if not quantitities Hz by default sampling_rate = sampling_rate * pq.Hz if type(t_start) == float or type(t_start) == int: # if not quantitities s by default t_start = t_start * pq.s unit = pq.Quantity(1, unit) if not lazy: sig = np.memmap(self.filename, dtype=dtype, mode="r", offset=bytesoffset) if sig.size % nbchannel != 0: sig = sig[: -sig.size % nbchannel] sig = sig.reshape((sig.size / nbchannel, nbchannel)) if dtype.kind == "i": sig = sig.astype("f") sig /= 2 ** (8 * dtype.itemsize) sig *= rangemax - rangemin sig += (rangemax + rangemin) / 2.0 elif dtype.kind == "u": sig = sig.astype("f") sig /= 2 ** (8 * dtype.itemsize) sig *= rangemax - rangemin sig += rangemin sig_with_units = pq.Quantity(sig, units=unit, copy=False) for i in range(nbchannel): if lazy: signal = [] * unit else: signal = sig_with_units[:, i] anaSig = AnalogSignal(signal, sampling_rate=sampling_rate, t_start=t_start, channel_index=i, copy=False) if lazy: # TODO anaSig.lazy_shape = None seg.analogsignals.append(anaSig) seg.create_many_to_one_relationship() return seg
def read_segment(self, import_neuroshare_segment = True, lazy=False, cascade=True):
    """
    Read the whole file as one Segment via the vendor neuroshare DLL.

    Arguments:
        import_neuroshare_segment: import neuroshare segment as SpikeTrain
            with associated waveforms or not imported at all.
        lazy: when True, data arrays are not read; objects carry a
            ``lazy_shape`` attribute with the would-be item count instead.
        cascade: when False, return the bare Segment (only library version
            annotated), without opening the file.
    """
    seg = Segment( file_origin = os.path.basename(self.filename), )

    # Load the vendor DLL/shared object; DllWithError wraps calls so that
    # non-zero neuroshare return codes raise instead of being ignored.
    if sys.platform.startswith('win'):
        neuroshare = ctypes.windll.LoadLibrary(self.dllname)
    elif sys.platform.startswith('linux'):
        neuroshare = ctypes.cdll.LoadLibrary(self.dllname)
    neuroshare = DllWithError(neuroshare)

    #elif sys.platform.startswith('darwin'):

    # API version
    info = ns_LIBRARYINFO()
    neuroshare.ns_GetLibraryInfo(ctypes.byref(info) , ctypes.sizeof(info))
    seg.annotate(neuroshare_version = str(info.dwAPIVersionMaj)+'.'+str(info.dwAPIVersionMin))

    if not cascade:
        return seg

    # open file
    hFile = ctypes.c_uint32(0)
    neuroshare.ns_OpenFile(ctypes.c_char_p(self.filename) ,ctypes.byref(hFile))
    fileinfo = ns_FILEINFO()
    neuroshare.ns_GetFileInfo(hFile, ctypes.byref(fileinfo) , ctypes.sizeof(fileinfo))

    # read all entities: each entity is dispatched on its dwEntityType
    # (event / analog / segment / neuralevent)
    for dwEntityID in range(fileinfo.dwEntityCount):
        entityInfo = ns_ENTITYINFO()
        neuroshare.ns_GetEntityInfo( hFile, dwEntityID, ctypes.byref(entityInfo), ctypes.sizeof(entityInfo))

        # EVENT -> neo Event
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_EVENT':
            pEventInfo = ns_EVENTINFO()
            neuroshare.ns_GetEventInfo ( hFile, dwEntityID,  ctypes.byref(pEventInfo), ctypes.sizeof(pEventInfo))

            # allocate the receive buffer matching the event payload type
            if pEventInfo.dwEventType == 0: #TEXT
                pData = ctypes.create_string_buffer(pEventInfo.dwMaxDataLength)
            elif pEventInfo.dwEventType == 1:#CVS
                pData = ctypes.create_string_buffer(pEventInfo.dwMaxDataLength)
            elif pEventInfo.dwEventType == 2:# 8bit
                pData = ctypes.c_byte(0)
            elif pEventInfo.dwEventType == 3:# 16bit
                pData = ctypes.c_int16(0)
            elif pEventInfo.dwEventType == 4:# 32bit
                pData = ctypes.c_int32(0)
            pdTimeStamp = ctypes.c_double(0.)
            pdwDataRetSize = ctypes.c_uint32(0)

            ea = Event(name = str(entityInfo.szEntityLabel),)
            if not lazy:
                times = [ ]
                labels = [ ]
                # events are fetched one item at a time by index
                for dwIndex in range(entityInfo.dwItemCount ):
                    neuroshare.ns_GetEventData ( hFile, dwEntityID, dwIndex,
                                        ctypes.byref(pdTimeStamp), ctypes.byref(pData),
                                        ctypes.sizeof(pData), ctypes.byref(pdwDataRetSize) )
                    times.append(pdTimeStamp.value)
                    labels.append(str(pData.value))
                ea.times = times*pq.s
                ea.labels = np.array(labels, dtype ='S')
            else :
                ea.lazy_shape = entityInfo.dwItemCount
            seg.eventarrays.append(ea)

        # analog -> neo AnalogSignal
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_ANALOG':
            pAnalogInfo = ns_ANALOGINFO()
            neuroshare.ns_GetAnalogInfo( hFile, dwEntityID,ctypes.byref(pAnalogInfo),ctypes.sizeof(pAnalogInfo) )
            dwIndexCount = entityInfo.dwItemCount

            if lazy:
                # empty quantity carrying only the unit
                signal = [ ]*pq.Quantity(1, pAnalogInfo.szUnits)
            else:
                pdwContCount = ctypes.c_uint32(0)
                pData = np.zeros( (entityInfo.dwItemCount,), dtype = 'float64')
                total_read = 0
                # ns_GetAnalogData may return fewer samples than requested,
                # so loop until the whole entity has been read
                while total_read< entityInfo.dwItemCount:
                    dwStartIndex = ctypes.c_uint32(total_read)
                    dwStopIndex = ctypes.c_uint32(entityInfo.dwItemCount - total_read)
                    neuroshare.ns_GetAnalogData( hFile,  dwEntityID,  dwStartIndex,
                                 dwStopIndex, ctypes.byref( pdwContCount) ,
                                 pData[total_read:].ctypes.data_as(ctypes.POINTER(ctypes.c_double)))
                    total_read += pdwContCount.value

                signal =  pq.Quantity(pData, units=pAnalogInfo.szUnits, copy = False)

            # t_start: timestamp of the first sample of this entity
            dwIndex = 0
            pdTime = ctypes.c_double(0)
            neuroshare.ns_GetTimeByIndex( hFile,  dwEntityID,  dwIndex, ctypes.byref(pdTime))

            anaSig = AnalogSignal(signal,
                                           sampling_rate = pAnalogInfo.dSampleRate*pq.Hz,
                                           t_start = pdTime.value * pq.s,
                                           name = str(entityInfo.szEntityLabel),
                                           )
            anaSig.annotate( probe_info = str(pAnalogInfo.szProbeInfo))
            if lazy:
                anaSig.lazy_shape = entityInfo.dwItemCount
            seg.analogsignals.append( anaSig )

        # segment (spike snippets with waveforms) -> neo SpikeTrain
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_SEGMENT' and import_neuroshare_segment:

            pdwSegmentInfo = ns_SEGMENTINFO()
            # only entities labelled 'spks...' are treated as spike data
            if not str(entityInfo.szEntityLabel).startswith('spks'):
                continue

            neuroshare.ns_GetSegmentInfo( hFile,  dwEntityID,
                                         ctypes.byref(pdwSegmentInfo), ctypes.sizeof(pdwSegmentInfo) )
            nsource = pdwSegmentInfo.dwSourceCount

            # NOTE(review): create_string_buffer(" "*256) takes a str here;
            # on Python 3 this call requires bytes — confirm target version.
            pszMsgBuffer  = ctypes.create_string_buffer(" "*256)
            neuroshare.ns_GetLastErrorMsg(ctypes.byref(pszMsgBuffer), 256)

            # source info is queried but only for its side effects /
            # error reporting; the results are not stored
            for dwSourceID in range(pdwSegmentInfo.dwSourceCount) :
                pSourceInfo = ns_SEGSOURCEINFO()
                neuroshare.ns_GetSegmentSourceInfo( hFile,  dwEntityID, dwSourceID,
                                    ctypes.byref(pSourceInfo), ctypes.sizeof(pSourceInfo) )

            if lazy:
                # NOTE(review): `times` here refers to whatever a previous
                # entity left behind and may be unbound for the first
                # entity in lazy mode — looks like it should be []*pq.s;
                # confirm before relying on lazy segment import.
                sptr = SpikeTrain(times, name = str(entityInfo.szEntityLabel), t_stop = 0.*pq.s)
                sptr.lazy_shape = entityInfo.dwItemCount
            else:
                pdTimeStamp = ctypes.c_double(0.)
                # one buffer holds all sources for a single snippet
                dwDataBufferSize = pdwSegmentInfo.dwMaxSampleCount*pdwSegmentInfo.dwSourceCount
                pData = np.zeros( (dwDataBufferSize), dtype = 'float64')
                pdwSampleCount = ctypes.c_uint32(0)
                pdwUnitID= ctypes.c_uint32(0)

                nsample  = int(dwDataBufferSize)
                times = np.empty( (entityInfo.dwItemCount), dtype = 'f')
                waveforms = np.empty( (entityInfo.dwItemCount, nsource, nsample), dtype = 'f')
                for dwIndex in range(entityInfo.dwItemCount ):
                    # size argument is in bytes: count * sizeof(double)
                    neuroshare.ns_GetSegmentData ( hFile,  dwEntityID,  dwIndex,
                        ctypes.byref(pdTimeStamp), pData.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
                        dwDataBufferSize * 8, ctypes.byref(pdwSampleCount),
                        ctypes.byref(pdwUnitID ) )

                    times[dwIndex] = pdTimeStamp.value
                    # buffer is sample-major; transpose to (source, sample)
                    waveforms[dwIndex, :,:] = pData[:nsample*nsource].reshape(nsample ,nsource).transpose()

                sptr = SpikeTrain(times = pq.Quantity(times, units = 's', copy = False),
                                    t_stop = times.max(),
                                    waveforms = pq.Quantity(waveforms, units = str(pdwSegmentInfo.szUnits), copy = False ),
                                    left_sweep = nsample/2./float(pdwSegmentInfo.dSampleRate)*pq.s,
                                    sampling_rate = float(pdwSegmentInfo.dSampleRate)*pq.Hz,
                                    name = str(entityInfo.szEntityLabel),
                                    )
            seg.spiketrains.append(sptr)

        # neuralevent (spike times without waveforms) -> neo SpikeTrain
        if entity_types[entityInfo.dwEntityType] == 'ns_ENTITY_NEURALEVENT':
            pNeuralInfo = ns_NEURALINFO()
            neuroshare.ns_GetNeuralInfo ( hFile,  dwEntityID,
                                 ctypes.byref(pNeuralInfo), ctypes.sizeof(pNeuralInfo))

            if lazy:
                times = [ ]*pq.s
                t_stop = 0*pq.s
            else:
                pData = np.zeros( (entityInfo.dwItemCount,), dtype = 'float64')
                dwStartIndex = 0
                dwIndexCount = entityInfo.dwItemCount
                # all timestamps are fetched in a single call
                neuroshare.ns_GetNeuralData( hFile,  dwEntityID,  dwStartIndex,
                    dwIndexCount,  pData.ctypes.data_as(ctypes.POINTER(ctypes.c_double)))
                times = pData*pq.s
                t_stop = times.max()
            sptr = SpikeTrain(times, t_stop =t_stop,
                                name = str(entityInfo.szEntityLabel),)
            if lazy:
                sptr.lazy_shape = entityInfo.dwItemCount
            seg.spiketrains.append(sptr)

    # close
    neuroshare.ns_CloseFile(hFile)

    seg.create_many_to_one_relationship()
    return seg
def read_nsx(self, filename_nsx, seg, lazy, cascade):
    """
    Read one Blackrock .nsx file (continuous signals) and append one
    AnalogSignal per channel to ``seg``.

    Arguments:
        filename_nsx: path of the .nsx file
        seg: neo Segment the signals are appended to (also annotated
            with the blackrock version and rec_datetime)
        lazy: when True, signals are empty and carry ``lazy_shape``
        cascade: when False, only the basic header is read
    """
    # basic header
    dt0 = [
        ('header_id', 'S8'),
        ('ver_major', 'uint8'),
        ('ver_minor', 'uint8'),
        ('header_size', 'uint32'),  # i.e. index of first data
        ('group_label', 'S16'),  # Read number of packet bytes, i.e. byte per samplepos
        ('comments', 'S256'),
        ('period_ratio', 'uint32'),
        ('sampling_rate', 'uint32'),
        ('window_datetime', 'S16'),
        ('nb_channel', 'uint32'),
    ]
    nsx_header = h = np.fromfile(filename_nsx, count=1, dtype=dt0)[0]
    version = '{}.{}'.format(h['ver_major'], h['ver_minor'])
    seg.annotate(blackrock_version=version)
    seg.rec_datetime = get_window_datetime(nsx_header['window_datetime'])
    nb_channel = h['nb_channel']
    # effective sample rate: base clock divided by the period ratio
    sr = float(h['sampling_rate']) / h['period_ratio']

    if not cascade:
        return

    # extented header = channel information
    dt1 = [
        ('header_id', 'S2'),
        ('channel_id', 'uint16'),
        ('label', 'S16'),
        ('connector_id', 'uint8'),
        ('connector_pin', 'uint8'),
        ('min_digital_val', 'int16'),
        ('max_digital_val', 'int16'),
        ('min_analog_val', 'int16'),
        ('max_analog_val', 'int16'),
        ('units', 'S16'),
        ('hi_freq_corner', 'uint32'),
        ('hi_freq_order', 'uint32'),
        ('hi_freq_type', 'uint16'),  # 0=None 1=Butterworth
        ('lo_freq_corner', 'uint32'),
        ('lo_freq_order', 'uint32'),
        ('lo_freq_type', 'uint16'),  # 0=None 1=Butterworth
    ]
    channels_header = ch = np.memmap(filename_nsx, shape=nb_channel,
                                     offset=np.dtype(dt0).itemsize, dtype=dt1)

    # read data
    dt2 = [
        ('header_id', 'uint8'),
        ('n_start', 'uint32'),
        ('nb_sample', 'uint32'),
    ]
    sample_header = sh = np.memmap(filename_nsx, dtype=dt2, shape=1,
                                   offset=nsx_header['header_size'])[0]
    nb_sample = sample_header['nb_sample']
    data = np.memmap(filename_nsx, dtype='int16', shape=(nb_sample, nb_channel),
                     offset=nsx_header['header_size'] + np.dtype(dt2).itemsize)

    # create neo objects
    for i in range(nb_channel):
        unit = str(channels_header['units'][i])
        if lazy:
            sig = []
        else:
            sig = data[:, i].astype(float)
            # digital value -> physical value
            if ch['max_analog_val'][i] == -ch['min_analog_val'][i] and \
                    ch['max_digital_val'][i] == -ch['min_digital_val'][i]:
                # when symetric it is simple: one scale factor
                sig *= float(ch['max_analog_val'][i]) / float(
                    ch['max_digital_val'][i])
            else:
                # general case: shift to zero, rescale, shift to analog min.
                # BUGFIX: min_analog_val/min_digital_val were previously
                # missing the [i] index, so the whole per-channel arrays
                # were mixed into this channel's scaling.
                sig -= ch['min_digital_val'][i]
                sig *= float(ch['max_analog_val'][i] - ch['min_analog_val'][i]) / \
                    float(ch['max_digital_val'][i] - ch['min_digital_val'][i])
                sig += float(ch['min_analog_val'][i])
        anasig = AnalogSignal(signal=pq.Quantity(sig, unit, copy=False),
                              sampling_rate=sr * pq.Hz,
                              t_start=sample_header['n_start'] / sr * pq.s,
                              name=str(ch['label'][i]),
                              channel_index=int(ch['channel_id'][i]))
        if lazy:
            anasig.lazy_shape = nb_sample
        seg.analogsignals.append(anasig)