def read_segment(self, lazy=False, cascade=True):
    """Read the NeuroExplorer (.nex) file as a single Segment."""
    fid = open(self.filename, 'rb')
    globalHeader = HeaderReader(fid, GlobalHeader).read_f(offset=0)
    #~ print globalHeader
    #~ print 'version' , globalHeader['version']
    seg = Segment()
    seg.file_origin = os.path.basename(self.filename)
    seg.annotate(neuroexplorer_version=globalHeader['version'])
    seg.annotate(comment=globalHeader['comment'])

    if not cascade:
        return seg

    # The global header is 544 bytes and is followed by one 208-byte entity
    # header per variable.  Entity type codes: 0 neuron, 1 event, 2 interval,
    # 3 spike waveform, 4 population vector, 5 continuous signal, 6 marker.
    offset = 544
    for i in range(globalHeader['nvar']):
        entityHeader = HeaderReader(
            fid, EntityHeader).read_f(offset=offset + i * 208)
        # the name field is fixed width and NUL padded
        entityHeader['name'] = entityHeader['name'].decode().replace(
            '\x00', '')
        #print 'i',i, entityHeader['type']

        if entityHeader['type'] == 0:
            # neuron
            if lazy:
                spike_times = [] * pq.s
            else:
                spike_times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'],
                )
                # timestamps are stored as integer ticks at the global
                # clock frequency
                spike_times = spike_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
            sptr = SpikeTrain(
                times=spike_times,
                t_start=globalHeader['tbeg'] / globalHeader['freq'] * pq.s,
                t_stop=globalHeader['tend'] / globalHeader['freq'] * pq.s,
                name=entityHeader['name'],
            )
            if lazy:
                sptr.lazy_shape = entityHeader['n']
            sptr.annotate(channel_index=entityHeader['WireNumber'])
            seg.spiketrains.append(sptr)

        if entityHeader['type'] == 1:
            # event
            if lazy:
                event_times = [] * pq.s
            else:
                event_times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'],
                )
                event_times = event_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
            labels = np.array([''] * event_times.size, dtype='S')
            evar = EventArray(times=event_times, labels=labels,
                              channel_name=entityHeader['name'])
            if lazy:
                evar.lazy_shape = entityHeader['n']
            seg.eventarrays.append(evar)

        if entityHeader['type'] == 2:
            # interval
            if lazy:
                start_times = [] * pq.s
                stop_times = [] * pq.s
            else:
                start_times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'],
                )
                start_times = start_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
                stop_times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'] + entityHeader['n'] * 4,
                )
                stop_times = stop_times.astype(
                    'f') / globalHeader['freq'] * pq.s
            epar = EpochArray(
                times=start_times,
                durations=stop_times - start_times,
                labels=np.array([''] * start_times.size, dtype='S'),
                channel_name=entityHeader['name'])
            if lazy:
                epar.lazy_shape = entityHeader['n']
            seg.epocharrays.append(epar)

        if entityHeader['type'] == 3:
            # spiketrain and waveforms
            if lazy:
                spike_times = [] * pq.s
                waveforms = None
            else:
                spike_times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'],
                )
                spike_times = spike_times.astype(
                    'f8') / globalHeader['freq'] * pq.s
                waveforms = np.memmap(
                    self.filename, np.dtype('i2'), 'r',
                    shape=(entityHeader['n'], 1,
                           entityHeader['NPointsWave']),
                    offset=entityHeader['offset'] + entityHeader['n'] * 4,
                )
                # convert ADC counts to millivolts
                waveforms = (waveforms.astype('f') *
                             entityHeader['ADtoMV'] +
                             entityHeader['MVOffset']) * pq.mV
            t_stop = globalHeader['tend'] / globalHeader['freq'] * pq.s
            if spike_times.size > 0:
                t_stop = max(t_stop, max(spike_times))
            sptr = SpikeTrain(
                times=spike_times,
                t_start=globalHeader['tbeg'] / globalHeader['freq'] * pq.s,
                #~ t_stop = max(globalHeader['tend']/globalHeader['freq']*pq.s,max(spike_times)),
                t_stop=t_stop,
                name=entityHeader['name'],
                waveforms=waveforms,
                sampling_rate=entityHeader['WFrequency'] * pq.Hz,
                left_sweep=0 * pq.ms,
            )
            if lazy:
                sptr.lazy_shape = entityHeader['n']
            sptr.annotate(channel_index=entityHeader['WireNumber'])
            seg.spiketrains.append(sptr)

        if entityHeader['type'] == 4:
            # popvectors
            pass

        if entityHeader['type'] == 5:
            # analog (continuous signal)
            timestamps = np.memmap(
                self.filename, np.dtype('i4'), 'r',
                shape=(entityHeader['n']),
                offset=entityHeader['offset'],
            )
            timestamps = timestamps.astype('f8') / globalHeader['freq']
            fragmentStarts = np.memmap(
                self.filename, np.dtype('i4'), 'r',
                shape=(entityHeader['n']),
                offset=entityHeader['offset'],
            )
            fragmentStarts = fragmentStarts.astype(
                'f8') / globalHeader['freq']
            t_start = timestamps[0] - fragmentStarts[0] / float(
                entityHeader['WFrequency'])
            del timestamps, fragmentStarts

            if lazy:
                signal = [] * pq.mV
            else:
                signal = np.memmap(
                    self.filename, np.dtype('i2'), 'r',
                    shape=(entityHeader['NPointsWave']),
                    offset=entityHeader['offset'],
                )
                signal = signal.astype('f')
                signal *= entityHeader['ADtoMV']
                signal += entityHeader['MVOffset']
                signal = signal * pq.mV

            anaSig = AnalogSignal(
                signal=signal,
                t_start=t_start * pq.s,
                sampling_rate=entityHeader['WFrequency'] * pq.Hz,
                name=entityHeader['name'],
                channel_index=entityHeader['WireNumber'])
            if lazy:
                anaSig.lazy_shape = entityHeader['NPointsWave']
            seg.analogsignals.append(anaSig)

        if entityHeader['type'] == 6:
            # markers : TO TEST
            if lazy:
                times = [] * pq.s
                labels = np.array([], dtype='S')
                markertype = None
            else:
                times = np.memmap(
                    self.filename, np.dtype('i4'), 'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'],
                )
                times = times.astype('f8') / globalHeader['freq'] * pq.s
                fid.seek(entityHeader['offset'] + entityHeader['n'] * 4)
                markertype = fid.read(64).replace('\x00', '')
                labels = np.memmap(
                    self.filename,
                    np.dtype('S' + str(entityHeader['MarkerLength'])),
                    'r',
                    shape=(entityHeader['n']),
                    offset=entityHeader['offset'] +
                    entityHeader['n'] * 4 + 64)
            ea = EventArray(times=times,
                            labels=labels.view(np.ndarray),
                            name=entityHeader['name'],
                            channel_index=entityHeader['WireNumber'],
                            marker_type=markertype)
            if lazy:
                ea.lazy_shape = entityHeader['n']
            seg.eventarrays.append(ea)

    seg.create_many_to_one_relationship()
    return seg
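# Usage sketch (added for illustration, not part of the original reader):
# this assumes the method above belongs to neo.io.NeuroExplorerIO; the helper
# name and file path below are hypothetical placeholders.
def _demo_read_nex(filename='data/example.nex'):
    from neo.io import NeuroExplorerIO

    reader = NeuroExplorerIO(filename=filename)
    seg = reader.read_segment(lazy=False, cascade=True)
    # the NeuroExplorer version and file comment end up as Segment annotations
    print(seg.annotations.get('neuroexplorer_version'))
    # each spike train carries its wire number as a 'channel_index' annotation
    for st in seg.spiketrains:
        print(st.name, st.annotations.get('channel_index'), st.t_stop)
    return seg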
def read_segment(self, cascade=True, lazy=False):
    """
    Read the Micromed (.TRC) file as a single Segment.

    Arguments:
        cascade, lazy: usual neo.io flags.
    """
    f = struct_file(self.filename, 'rb')

    # Name
    f.seek(64, 0)
    surname = f.read(22)
    while surname[-1] == ' ':
        if len(surname) == 0:
            break
        surname = surname[:-1]
    firstname = f.read(20)
    while firstname[-1] == ' ':
        if len(firstname) == 0:
            break
        firstname = firstname[:-1]

    # Date
    f.seek(128, 0)
    day, month, year, hour, minute, sec = f.read_f('bbbbbb')
    rec_datetime = datetime.datetime(year + 1900, month, day,
                                     hour, minute, sec)

    f.seek(138, 0)
    Data_Start_Offset, Num_Chan, Multiplexer, Rate_Min, Bytes = f.read_f(
        'IHHHH')
    #~ print Num_Chan, Bytes

    # header version
    f.seek(175, 0)
    header_version, = f.read_f('b')
    assert header_version == 4

    seg = Segment(
        name=firstname + ' ' + surname,
        file_origin=os.path.basename(self.filename),
    )
    seg.annotate(surname=surname)
    seg.annotate(firstname=firstname)
    seg.annotate(rec_datetime=rec_datetime)

    if not cascade:
        return seg

    # area: table of (name, position, length) descriptors for each zone
    f.seek(176, 0)
    zone_names = ['ORDER', 'LABCOD', 'NOTE', 'FLAGS', 'TRONCA', 'IMPED_B',
                  'IMPED_E', 'MONTAGE', 'COMPRESS', 'AVERAGE', 'HISTORY',
                  'DVIDEO', 'EVENT A', 'EVENT B', 'TRIGGER']
    zones = {}
    for zname in zone_names:
        zname2, pos, length = f.read_f('8sII')
        zones[zname] = zname2, pos, length
        #~ print zname2, pos, length

    # reading raw data
    if not lazy:
        f.seek(Data_Start_Offset, 0)
        rawdata = np.fromstring(f.read(), dtype='u' + str(Bytes))
        # integer division keeps the reshape valid on Python 3 as well
        rawdata = rawdata.reshape((rawdata.size // Num_Chan, Num_Chan))

    # Reading Code Info
    zname2, pos, length = zones['ORDER']
    f.seek(pos, 0)
    code = np.fromfile(f, dtype='u2', count=Num_Chan)

    units = {-1: pq.nano * pq.V, 0: pq.uV, 1: pq.mV, 2: 1,
             100: pq.percent, 101: pq.dimensionless, 102: pq.dimensionless}

    for c in range(Num_Chan):
        zname2, pos, length = zones['LABCOD']
        f.seek(pos + code[c] * 128 + 2, 0)

        label = f.read(6).strip("\x00")
        ground = f.read(6).strip("\x00")
        (logical_min, logical_max, logical_ground,
         physical_min, physical_max) = f.read_f('iiiii')
        k, = f.read_f('h')
        if k in units.keys():
            unit = units[k]
        else:
            unit = pq.uV

        f.seek(8, 1)
        sampling_rate, = f.read_f('H') * pq.Hz
        sampling_rate *= Rate_Min

        if lazy:
            signal = [] * unit
        else:
            # scale raw integer samples to physical units
            factor = float(physical_max - physical_min) / float(
                logical_max - logical_min + 1)
            signal = (rawdata[:, c].astype('f') -
                      logical_ground) * factor * unit

        anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                              name=label, channel_index=c)
        if lazy:
            anaSig.lazy_shape = None
        anaSig.annotate(ground=ground)
        seg.analogsignals.append(anaSig)

    sampling_rate = np.mean(
        [anaSig.sampling_rate for anaSig in seg.analogsignals]) * pq.Hz

    # Read trigger and notes
    for zname, label_dtype in [('TRIGGER', 'u2'), ('NOTE', 'S40')]:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        triggers = np.fromstring(
            f.read(length),
            dtype=[('pos', 'u4'), ('label', label_dtype)],
        )
        ea = EventArray(name=zname[0] + zname[1:].lower())
        if not lazy:
            keep = ((triggers['pos'] >= triggers['pos'][0]) &
                    (triggers['pos'] < rawdata.shape[0]) &
                    (triggers['pos'] != 0))
            triggers = triggers[keep]
            ea.labels = triggers['label'].astype('S')
            ea.times = (triggers['pos'] / sampling_rate).rescale('s')
        else:
            ea.lazy_shape = triggers.size
        seg.eventarrays.append(ea)

    # Read Event A and B
    # Not so well tested
    for zname in ['EVENT A', 'EVENT B']:
        zname2, pos, length = zones[zname]
        f.seek(pos, 0)
        epochs = np.fromstring(
            f.read(length),
            dtype=[('label', 'u4'), ('start', 'u4'), ('stop', 'u4')],
        )
        ep = EpochArray(name=zname[0] + zname[1:].lower())
        if not lazy:
            keep = ((epochs['start'] > 0) &
                    (epochs['start'] < rawdata.shape[0]) &
                    (epochs['stop'] < rawdata.shape[0]))
            epochs = epochs[keep]
            ep.labels = epochs['label'].astype('S')
            ep.times = (epochs['start'] / sampling_rate).rescale('s')
            ep.durations = ((epochs['stop'] - epochs['start']) /
                            sampling_rate).rescale('s')
        else:
            # lazy_shape should reflect this zone's epochs, not the
            # triggers left over from the previous loop
            ep.lazy_shape = epochs.size
        seg.epocharrays.append(ep)

    seg.create_many_to_one_relationship()
    return seg
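# Usage sketch (illustrative only): this assumes the Micromed reader above is
# neo.io.MicromedIO; the helper name and '.TRC' path are hypothetical.
def _demo_read_trc(filename='data/example.TRC'):
    from neo.io import MicromedIO

    reader = MicromedIO(filename=filename)
    seg = reader.read_segment(cascade=True, lazy=False)
    # one AnalogSignal per channel, annotated with its Micromed ground label
    for anasig in seg.analogsignals:
        print(anasig.name, anasig.sampling_rate,
              anasig.annotations.get('ground'))
    # trigger and note zones are exposed as EventArrays
    for ea in seg.eventarrays:
        print(ea.name, len(ea.times))
    return seg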