def setup_irregularlysampledsignals(self):
    """Create four IrregularlySampledSignal fixtures and store them on self.

    Signals 1.x use mA/ms units, signals 2.x use A/s units; the name,
    data, and time lists are kept parallel so the fixtures can be built
    in a single pass.
    """
    names = ['irregularsignal 1 1', 'irregularsignal 1 2',
             'irregularsignal 2 1', 'irregularsignal 2 2']
    data = [np.arange(0, 10) * pq.mA, np.arange(10, 20) * pq.mA,
            np.arange(20, 30) * pq.A, np.arange(30, 40) * pq.A]
    times = [np.arange(0, 10) * pq.ms, np.arange(10, 20) * pq.ms,
             np.arange(20, 30) * pq.s, np.arange(30, 40) * pq.s]

    self.irsignames1 = names[:2]
    self.irsignames2 = names[2:]
    self.irsignames = list(names)

    signals = [IrregularlySampledSignal(t, d, name=n)
               for t, d, n in zip(times, data, names)]
    self.irsig1 = signals[:2]
    self.irsig2 = signals[2:]
    self.irsig = list(signals)
def _read_irregularlysampledsignal(self, node, parent):
    """Build an IrregularlySampledSignal from *node* and attach it to *parent*."""
    attributes = self._get_standard_attributes(node)
    times = self._get_quantity(node["times"])
    values = self._get_quantity(node["signal"])
    signal = IrregularlySampledSignal(times=times, signal=values, **attributes)
    signal.segment = parent
    return signal
def create_irregularlysampledsignal(self, parent=None, name='IrregularlySampledSignal'):
    """Create a three-sample, two-channel test signal attached to *parent*."""
    sample_times = [1.0, 2.3, 6.4] * pq.ms
    sample_values = [[1, 4], [2, 1], [0, -5]] * pq.mV
    signal = IrregularlySampledSignal(sample_times, sample_values)
    signal.segment = parent
    self._assign_basic_attributes(signal, name=name)
    return signal
def _read_irregularlysampledsignal(self, node, parent):
    """Read an IrregularlySampledSignal node; record its lazy shape when lazy."""
    attributes = self._get_standard_attributes(node)
    signal = IrregularlySampledSignal(
        times=self._get_quantity(node["times"]),
        signal=self._get_quantity(node["signal"]),
        **attributes)
    signal.segment = parent
    if self._lazy:
        shape = node["signal"].shape
        # a 1-D on-disk shape is reported as (n, 1) so it looks 2-D downstream
        signal.lazy_shape = (shape[0], 1) if len(shape) == 1 else shape
    return signal
def _read_signal_file(
        filepaths: List[str],
        signal_unit: Quantity = None) -> IrregularlySampledSignal:
    """Read the 'continuous' CSV signal file found in *filepaths*.

    The file stores alternating rows: a row of timestamps followed by a
    row of signal values of the same length.  Things are a bit complicated
    here as the signal is not necessarily covering the whole experiment.

    :param filepaths: candidate file paths; the first whose basename
        contains "continuous" (case-insensitive) is read.
    :param signal_unit: optional unit for the signal values; when None the
        signal is created as "dimensionless".
    :return: an annotated IrregularlySampledSignal, or None when reading
        fails (the traceback is printed, best-effort behavior preserved).
    """
    try:
        # get the continuous signal file
        signal_file = [
            file for file in filepaths
            if "continuous" in os.path.basename(file).lower()
        ][0]
        times = []
        signal = []
        with open(signal_file, "r") as file:
            reader = csv.reader(file, delimiter=",")
            # "next" raises StopIteration at end-of-file, which terminates
            # the otherwise unbounded loop
            try:
                while True:
                    # read paired time and signal rows
                    time_row = np.array([float(val) for val in next(reader)])
                    signal_row = np.array([float(val) for val in next(reader)])
                    assert len(time_row) == len(signal_row)
                    times.append(time_row)
                    signal.append(signal_row)
            except StopIteration:
                pass
        # concatenate our list of arrays
        times = np.concatenate(times) * second
        signal = np.concatenate(signal)
        assert len(times) == len(signal)
        if signal_unit is not None:
            signal = Quantity(signal, signal_unit)
        else:
            signal = Quantity(signal, "dimensionless")
        result = IrregularlySampledSignal(times=times, signal=signal,
                                          name="Irregularly Sampled Signal",
                                          file_origin=signal_file)
        channel_id = f"{TypeID.RAW_DATA.value}.0"
        result.annotate(id=channel_id, type_id=TypeID.RAW_DATA.value)
        return result
    # something might go wrong as we perform some IO operations here;
    # keep the broad catch (best-effort read) but make the None return explicit
    except Exception:
        traceback.print_exc()
        return None
def _read_irregularlysampledsignal(self, node, parent):
    """Reconstruct an IrregularlySampledSignal from *node*, owned by *parent*."""
    attrs = self._get_standard_attributes(node)
    sig = IrregularlySampledSignal(times=self._get_quantity(node["times"]),
                                   signal=self._get_quantity(node["signal"]),
                                   **attrs)
    sig.segment = parent
    if self._lazy:
        lazy_shape = node["signal"].shape
        if len(lazy_shape) == 1:
            # promote 1-D shapes to a single-channel 2-D shape
            lazy_shape = (lazy_shape[0], 1)
        sig.lazy_shape = lazy_shape
    return sig
def _get_tracking(self, channel, conversion):
    """Build position-tracking signals from the KWE file.

    One IrregularlySampledSignal is created per OSC-Port node; when
    *channel* is given a TTL Event is built as well and returned first.
    """
    if channel is not None:
        ttl = self._kwe['event_types']['TTL']['events']
        ttls = ttl['time_samples'].value
        event_channels = ttl['user_data']['event_channels'].value
        event_id = ttl['user_data']['eventID'].value
        eva = Event()
        selected = (event_channels == channel) & (event_id == 1)
        eva.times = (ttls[selected] / self._attrs['kwe']['sample_rate']) * pq.s
        eva.name = 'TrackingTTL'

    messages = self._kwe['event_types']['Binary_messages']['events']
    posdata = messages['user_data']['Data'].value
    node_id = messages['user_data']['nodeID'].value
    time_samples = messages['time_samples'].value
    sample_rate = self._attrs['kwe']['sample_rate']

    sigs = []
    for node in self._nodes['OSC Port']:
        mask = node_id == int(node['NodeId'])
        sigs.append(IrregularlySampledSignal(
            signal=posdata[mask] * conversion * pq.m,
            times=(time_samples[mask] / sample_rate) * pq.s,
            name=node['address']))

    if channel is not None:
        return eva, sigs
    return sigs
def create_all_annotated(cls):
    """Build a Block holding one annotated instance of every Neo type."""
    times = cls.rquant(1, pq.s)
    signal = cls.rquant(1, pq.V)

    blk = Block()
    blk.annotate(**cls.rdict(3))
    cls.populate_dates(blk)

    seg = Segment()
    seg.annotate(**cls.rdict(4))
    cls.populate_dates(seg)
    blk.segments.append(seg)

    asig = AnalogSignal(signal=signal, sampling_rate=pq.Hz)
    asig.annotate(**cls.rdict(2))
    seg.analogsignals.append(asig)

    isig = IrregularlySampledSignal(times=times, signal=signal,
                                    time_units=pq.s)
    isig.annotate(**cls.rdict(2))
    seg.irregularlysampledsignals.append(isig)

    epoch = Epoch(times=times, durations=times)
    epoch.annotate(**cls.rdict(4))
    seg.epochs.append(epoch)

    event = Event(times=times)
    event.annotate(**cls.rdict(4))
    seg.events.append(event)

    spiketrain = SpikeTrain(times=times, t_stop=pq.s, units=pq.s)
    st_annotations = cls.rdict(6)
    # include scalar and array quantities among the annotations
    st_annotations["quantity"] = pq.Quantity(10, "mV")
    st_annotations["qarray"] = pq.Quantity(range(10), "mA")
    spiketrain.annotate(**st_annotations)
    seg.spiketrains.append(spiketrain)

    chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
    chx.annotate(**cls.rdict(5))
    blk.channel_indexes.append(chx)

    unit = Unit()
    unit.annotate(**cls.rdict(2))
    chx.units.append(unit)

    return blk
def test_signals_compound_units(self):
    """Round-trip signals whose units and sampling use pq.CompoundUnit."""
    block = Block()
    seg = Segment()
    block.segments.append(seg)

    units = pq.CompoundUnit("1/30000*V")
    srate = pq.Quantity(10, pq.CompoundUnit("1.0/10 * Hz"))
    asig = AnalogSignal(signal=self.rquant((10, 3), units),
                        sampling_rate=srate)
    seg.analogsignals.append(asig)
    self.write_and_compare([block])

    anotherblock = Block("ir signal block")
    seg = Segment("ir signal seg")
    anotherblock.segments.append(seg)
    irsig = IrregularlySampledSignal(
        signal=np.random.random((20, 3)),
        times=self.rquant(20, pq.CompoundUnit("0.1 * ms"), True),
        units=pq.CompoundUnit("10 * V / s"))
    seg.irregularlysampledsignals.append(irsig)
    self.write_and_compare([block, anotherblock])

    block.segments[0].analogsignals.append(
        AnalogSignal(signal=[10.0, 1.0, 3.0], units=pq.S,
                     sampling_period=pq.Quantity(3, "s"),
                     dtype=np.double, name="signal42",
                     description="this is an analogsignal",
                     t_start=45 * pq.CompoundUnit("3.14 * s")),
    )
    self.write_and_compare([block, anotherblock])

    times = self.rquant(10, pq.CompoundUnit("3 * year"), True)
    block.segments[0].irregularlysampledsignals.append(
        IrregularlySampledSignal(times=times,
                                 signal=np.random.random((10, 3)),
                                 units="mV",
                                 # np.float was deprecated in NumPy 1.20 and
                                 # removed in 1.24; the builtin float is the
                                 # documented equivalent
                                 dtype=float,
                                 name="some sort of signal",
                                 description="the signal is described"))
    self.write_and_compare([block, anotherblock])
def create_all_annotated(cls):
    """Return a Block populated with one annotated object of each Neo type."""
    times = cls.rquant(1, pq.s)
    signal = cls.rquant(1, pq.V)

    block = Block()
    block.annotate(**cls.rdict(3))

    segment = Segment()
    segment.annotate(**cls.rdict(4))
    block.segments.append(segment)

    analog = AnalogSignal(signal=signal, sampling_rate=pq.Hz)
    analog.annotate(**cls.rdict(2))
    segment.analogsignals.append(analog)

    irregular = IrregularlySampledSignal(times=times, signal=signal,
                                         time_units=pq.s)
    irregular.annotate(**cls.rdict(2))
    segment.irregularlysampledsignals.append(irregular)

    ep = Epoch(times=times, durations=times)
    ep.annotate(**cls.rdict(4))
    segment.epochs.append(ep)

    ev = Event(times=times)
    ev.annotate(**cls.rdict(4))
    segment.events.append(ev)

    train = SpikeTrain(times=times, t_stop=pq.s, units=pq.s)
    annotations = cls.rdict(6)
    # mix scalar and array quantities into the annotation dict
    annotations["quantity"] = pq.Quantity(10, "mV")
    annotations["qarray"] = pq.Quantity(range(10), "mA")
    train.annotate(**annotations)
    segment.spiketrains.append(train)

    channels = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
    channels.annotate(**cls.rdict(5))
    block.channel_indexes.append(channels)

    unit = Unit()
    unit.annotate(**cls.rdict(2))
    channels.units.append(unit)

    return block
def read_irregularlysampledsignal(fh, block_id, array_id):
    """Load an IrregularlySampledSignal from one NIX data array."""
    data_array = fh.handle.blocks[block_id].data_arrays[array_id]
    time_dim = data_array.dimensions[0]
    signal = IrregularlySampledSignal(
        name=Reader.Help.get_obj_neo_name(data_array),
        signal=data_array[:],  # TODO think about lazy data loading
        units=data_array.unit,
        times=time_dim.ticks,
        time_units=time_dim.unit,
        dtype=data_array.dtype,
    )
    attrs = Reader.Help.read_attributes(data_array.metadata,
                                        'irregularlysampledsignal')
    for key, value in attrs.items():
        setattr(signal, key, value)
    signal.annotations = Reader.Help.read_annotations(
        data_array.metadata, 'irregularlysampledsignal')
    return signal
def test_signals_write(self):
    """Write analog and irregularly sampled signals and verify round-trips."""
    block = Block()
    seg = Segment()
    block.segments.append(seg)

    asig = AnalogSignal(signal=self.rquant((10, 3), pq.mV),
                        sampling_rate=pq.Quantity(10, "Hz"))
    seg.analogsignals.append(asig)
    self.write_and_compare([block])

    anotherblock = Block("ir signal block")
    seg = Segment("ir signal seg")
    anotherblock.segments.append(seg)
    irsig = IrregularlySampledSignal(
        signal=np.random.random((20, 3)),
        times=self.rquant(20, pq.ms, True),
        units=pq.A
    )
    seg.irregularlysampledsignals.append(irsig)
    self.write_and_compare([block, anotherblock])

    block.segments[0].analogsignals.append(
        AnalogSignal(signal=[10.0, 1.0, 3.0], units=pq.S,
                     sampling_period=pq.Quantity(3, "s"),
                     dtype=np.double, name="signal42",
                     description="this is an analogsignal",
                     t_start=45 * pq.ms),
    )
    self.write_and_compare([block, anotherblock])

    block.segments[0].irregularlysampledsignals.append(
        IrregularlySampledSignal(times=np.random.random(10),
                                 signal=np.random.random((10, 3)),
                                 units="mV", time_units="s",
                                 # np.float was deprecated in NumPy 1.20 and
                                 # removed in 1.24; builtin float is equivalent
                                 dtype=float,
                                 name="some sort of signal",
                                 description="the signal is described")
    )
    self.write_and_compare([block, anotherblock])
def test_anonymous_objects_write(self):
    """Write many unnamed Neo objects and verify they round-trip intact."""
    nblocks, nsegs = 2, 2
    nanasig, nirrseg = 4, 2
    nepochs, nevents, nspiketrains = 3, 4, 3
    nchx, nunits = 5, 10

    times = self.rquant(1, pq.s)
    signal = self.rquant(1, pq.V)

    blocks = []
    for _ in range(nblocks):
        blk = Block()
        blocks.append(blk)
        for _ in range(nsegs):
            seg = Segment()
            blk.segments.append(seg)
            for _ in range(nanasig):
                seg.analogsignals.append(
                    AnalogSignal(signal=signal, sampling_rate=pq.Hz))
            for _ in range(nirrseg):
                seg.irregularlysampledsignals.append(
                    IrregularlySampledSignal(times=times, signal=signal,
                                             time_units=pq.s))
            for _ in range(nepochs):
                seg.epochs.append(Epoch(times=times, durations=times))
            for _ in range(nevents):
                seg.events.append(Event(times=times))
            for _ in range(nspiketrains):
                seg.spiketrains.append(
                    SpikeTrain(times=times, t_stop=times[-1] + pq.s,
                               units=pq.s))
        for chidx in range(nchx):
            chx = ChannelIndex(name="chx{}".format(chidx), index=[1, 2],
                               channel_ids=[11, 22])
            blk.channel_indexes.append(chx)
            for _ in range(nunits):
                chx.units.append(Unit())

    self.writer.write_all_blocks(blocks)
    self.compare_blocks(blocks, self.reader.blocks)
def test_multiref_write(self):
    """Children shared between several segments/units must round-trip once."""
    blk = Block("blk1")
    signal = AnalogSignal(name="sig1", signal=[0, 1, 2], units="mV",
                          sampling_period=pq.Quantity(1, "ms"))
    othersignal = IrregularlySampledSignal(name="i1", signal=[0, 0, 0],
                                           units="mV", times=[1, 2, 3],
                                           time_units="ms")
    event = Event(name="Evee", times=[0.3, 0.42], units="year")
    epoch = Epoch(name="epoche", times=[0.1, 0.2] * pq.min,
                  durations=[0.5, 0.5] * pq.min)
    st = SpikeTrain(name="the train of spikes", times=[0.1, 0.2, 10.3],
                    t_stop=11, units="us")

    # attach the same five children to three different segments
    for idx in range(3):
        seg = Segment("seg" + str(idx))
        blk.segments.append(seg)
        seg.analogsignals.append(signal)
        seg.irregularlysampledsignals.append(othersignal)
        seg.events.append(event)
        seg.epochs.append(epoch)
        seg.spiketrains.append(st)

    chidx = ChannelIndex([10, 20, 29])
    seg = blk.segments[0]
    st = SpikeTrain(name="choochoo", times=[10, 11, 80], t_stop=1000,
                    units="s")
    seg.spiketrains.append(st)
    blk.channel_indexes.append(chidx)
    # one spiketrain shared across six units
    for idx in range(6):
        unit = Unit("unit" + str(idx))
        chidx.units.append(unit)
        unit.spiketrains.append(st)

    self.writer.write_block(blk)
    self.compare_blocks([blk], self.reader.blocks)
def random_irreg_signal(name=None, **annotations):
    """Generate an IrregularlySampledSignal with random data and metadata."""
    channel_count = random.randint(1, 7)
    n_samples = random.randint(20, 200)
    if len(annotations) == 0:
        annotations = random_annotations(5)
    mean_firing_rate = np.random.uniform(0.1, 10) * pq.kHz
    # cumulative random inter-sample intervals give irregular timestamps
    intervals = np.random.uniform(1.0 / mean_firing_rate,
                                  size=(n_samples,))
    sample_times = np.cumsum(intervals) * pq.ms
    sample_values = np.random.uniform(size=(n_samples, channel_count))
    return IrregularlySampledSignal(
        sample_times,
        sample_values,
        units=random.choice(("mV", "nA")),
        name=name or random_string(),
        file_origin=random_string(),
        description=random_string(100),
        array_annotations=None,  # todo
        **annotations
    )
def test_no_segment_write(self):
    # Tests storing AnalogSignal, IrregularlySampledSignal, and SpikeTrain
    # objects in the secondary (ChannelIndex) substructure without them
    # being attached to a Segment.
    blk = Block("segmentless block")
    signal = AnalogSignal(name="sig1", signal=[0, 1, 2], units="mV",
                          sampling_period=pq.Quantity(1, "ms"))
    othersignal = IrregularlySampledSignal(name="i1", signal=[0, 0, 0],
                                           units="mV", times=[1, 2, 3],
                                           time_units="ms")
    sta = SpikeTrain(name="the train of spikes", times=[0.1, 0.2, 10.3],
                     t_stop=11, units="us")
    stb = SpikeTrain(name="the train of spikes b", times=[1.1, 2.2, 10.1],
                     t_stop=100, units="ms")

    chidx = ChannelIndex([8, 13, 21])
    blk.channel_indexes.append(chidx)
    chidx.analogsignals.append(signal)
    chidx.irregularlysampledsignals.append(othersignal)

    unit = Unit()
    chidx.units.append(unit)
    unit.spiketrains.extend([sta, stb])

    self.writer.write_block(blk)
    self.compare_blocks([blk], self.reader.blocks)
    self.writer.close()

    # re-read and verify the channel-index substructure survived
    reader = NixIO(self.filename, "ro")
    blk = reader.read_block(neoname="segmentless block")
    chx = blk.channel_indexes[0]
    self.assertEqual(len(chx.analogsignals), 1)
    self.assertEqual(len(chx.irregularlysampledsignals), 1)
    self.assertEqual(len(chx.units[0].spiketrains), 2)
def load(self, time_slice=None, strict_slicing=True):
    """
    Load AnalogSignalProxy args:
        :param time_slice: None or tuple of the time slice expressed
            with quantities. None is the entire signal.
        :param strict_slicing: True by default. Control if an error is
            raised or not when one of the time_slice members (t_start or
            t_stop) is outside the real time range of the segment.
    """
    i_start = i_stop = None
    sig_t_start = self.t_start
    if time_slice:
        if self.sampling_rate is None:
            # irregular sampling: locate the slice bounds in the timestamps
            i_start, i_stop = np.searchsorted(
                self._timeseries.timestamps, time_slice)
        else:
            i_start, i_stop, sig_t_start = self._time_slice_indices(
                time_slice, strict_slicing=strict_slicing)
    signal = self._timeseries.data[i_start:i_stop]
    if self.sampling_rate is None:
        return IrregularlySampledSignal(
            self._timeseries.timestamps[i_start:i_stop] * pq.s,
            signal,
            units=self.units,
            t_start=sig_t_start,
            sampling_rate=self.sampling_rate,
            name=self.name,
            description=self.description,
            array_annotations=None,
            **self.annotations)  # todo: timeseries.control / control_description
    return AnalogSignal(
        signal,
        units=self.units,
        t_start=sig_t_start,
        sampling_rate=self.sampling_rate,
        name=self.name,
        description=self.description,
        array_annotations=None,
        **self.annotations)  # todo: timeseries.control / control_description
def read_tracking(self, path):
    """
    Read tracking data_end
    """
    if len(path) == 0:
        pos_group = self._processing["Position"]
    else:
        pos_group = self._processing[path]["Position"]
    irr_signals = []
    for key in pos_group:
        spot_group = pos_group[key]
        timestamps = spot_group["timestamps"]
        coords = spot_group["data"]
        irr_signals.append(IrregularlySampledSignal(
            name=spot_group.name,
            signal=coords.data,
            times=timestamps.data,
            units=coords.attrs["unit"],
            time_units=timestamps.attrs["unit"]))
    return irr_signals
def test__issue_285(self):
    """Regression test: child->parent links must survive a pickle round-trip."""

    def pickle_roundtrip(block):
        # write the block with PickleIO, read it back, and return its
        # first segment; the temp file is removed afterwards
        writer = PickleIO(filename="blk.pkl")
        writer.write(block)
        read_back = PickleIO(filename="blk.pkl").read_block()
        os.remove('blk.pkl')
        return read_back.segments[0]

    # Spiketrain
    train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
    unit = Unit()
    train.unit = unit
    unit.spiketrains.append(train)
    epoch = Epoch(np.array([0, 10, 20]), np.array([2, 2, 2]),
                  np.array(["a", "b", "c"]), units="ms")
    blk = Block()
    seg = Segment()
    seg.spiketrains.append(train)
    seg.epochs.append(epoch)
    epoch.segment = seg
    blk.segments.append(seg)
    r_seg = pickle_roundtrip(blk)
    self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
    self.assertIsInstance(r_seg.epochs[0], Epoch)

    # Epoch
    epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
                  durations=[10, 5, 7] * pq.ms,
                  labels=np.array(['btn0', 'btn1', 'btn2'], dtype='U'))
    epoch.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.epochs.append(epoch)
    blk.segments.append(seg)
    r_seg = pickle_roundtrip(blk)
    self.assertIsInstance(r_seg.epochs[0].segment, Segment)

    # Event
    event = Event(np.arange(0, 30, 10) * pq.s,
                  labels=np.array(['trig0', 'trig1', 'trig2'], dtype='U'))
    event.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.events.append(event)
    blk.segments.append(seg)
    r_seg = pickle_roundtrip(blk)
    self.assertIsInstance(r_seg.events[0].segment, Segment)

    # IrregularlySampledSignal
    signal = IrregularlySampledSignal([0.0, 1.23, 6.78], [1, 2, 3],
                                      units='mV', time_units='ms')
    signal.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.irregularlysampledsignals.append(signal)
    blk.segments.append(seg)
    blk.segments[0].block = blk
    r_seg = pickle_roundtrip(blk)
    self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment,
                          Segment)
def test_roundtrip(self):
    """Write three fully populated Blocks to NWB and verify that every
    object type (signals, spike trains, events, epochs) reads back with
    matching attributes and data."""
    annotations = {"session_start_time": datetime.now()}
    # Define Neo blocks
    bl0 = Block(name='First block', **annotations)
    bl1 = Block(name='Second block', **annotations)
    bl2 = Block(name='Third block', **annotations)
    original_blocks = [bl0, bl1, bl2]

    num_seg = 4  # number of segments
    num_chan = 3  # number of channels

    for blk in original_blocks:

        for ind in range(num_seg):  # number of Segments
            seg = Segment(index=ind)
            seg.block = blk
            blk.segments.append(seg)

        for seg in blk.segments:  # AnalogSignal objects

            # 3 Neo AnalogSignals
            a = AnalogSignal(np.random.randn(44, num_chan) * pq.nA,
                             sampling_rate=10 * pq.kHz,
                             t_start=50 * pq.ms)
            b = AnalogSignal(np.random.randn(64, num_chan) * pq.mV,
                             sampling_rate=8 * pq.kHz,
                             t_start=40 * pq.ms)
            c = AnalogSignal(np.random.randn(33, num_chan) * pq.uA,
                             sampling_rate=10 * pq.kHz,
                             t_start=120 * pq.ms)

            # 2 Neo IrregularlySampledSignals
            d = IrregularlySampledSignal(
                np.arange(7.0) * pq.ms,
                np.random.randn(7, num_chan) * pq.mV)

            # 2 Neo SpikeTrains
            train = SpikeTrain(times=[1, 2, 3] * pq.s, t_start=1.0,
                               t_stop=10.0)
            train2 = SpikeTrain(times=[4, 5, 6] * pq.s, t_stop=10.0)
            # todo: add waveforms

            # 1 Neo Event
            evt = Event(times=np.arange(0, 30, 10) * pq.ms,
                        labels=np.array(['ev0', 'ev1', 'ev2']))

            # 2 Neo Epochs
            epc = Epoch(times=np.arange(0, 30, 10) * pq.s,
                        durations=[10, 5, 7] * pq.ms,
                        labels=np.array(['btn0', 'btn1', 'btn2']))

            epc2 = Epoch(times=np.arange(10, 40, 10) * pq.s,
                         durations=[9, 3, 8] * pq.ms,
                         labels=np.array(['btn3', 'btn4', 'btn5']))

            seg.spiketrains.append(train)
            seg.spiketrains.append(train2)

            seg.epochs.append(epc)
            seg.epochs.append(epc2)

            seg.analogsignals.append(a)
            seg.analogsignals.append(b)
            seg.analogsignals.append(c)
            seg.irregularlysampledsignals.append(d)
            seg.events.append(evt)
            # set the child->parent links explicitly
            a.segment = seg
            b.segment = seg
            c.segment = seg
            d.segment = seg
            evt.segment = seg
            train.segment = seg
            train2.segment = seg
            epc.segment = seg
            epc2.segment = seg

    # write to file
    test_file_name = "test_round_trip.nwb"
    iow = NWBIO(filename=test_file_name, mode='w')
    iow.write_all_blocks(original_blocks)

    ior = NWBIO(filename=test_file_name, mode='r')
    retrieved_blocks = ior.read_all_blocks()

    self.assertEqual(len(retrieved_blocks), 3)
    self.assertEqual(len(retrieved_blocks[2].segments), num_seg)

    # compare one AnalogSignal attribute-by-attribute and by data
    original_signal_22b = original_blocks[2].segments[2].analogsignals[1]
    retrieved_signal_22b = retrieved_blocks[2].segments[2].analogsignals[1]
    for attr_name in ("name", "units", "sampling_rate", "t_start"):
        retrieved_attribute = getattr(retrieved_signal_22b, attr_name)
        original_attribute = getattr(original_signal_22b, attr_name)
        self.assertEqual(retrieved_attribute, original_attribute)
    assert_array_equal(retrieved_signal_22b.magnitude,
                       original_signal_22b.magnitude)

    # compare one IrregularlySampledSignal
    original_issignal_22d = original_blocks[2].segments[
        2].irregularlysampledsignals[0]
    retrieved_issignal_22d = retrieved_blocks[2].segments[
        2].irregularlysampledsignals[0]
    for attr_name in ("name", "units", "t_start"):
        retrieved_attribute = getattr(retrieved_issignal_22d, attr_name)
        original_attribute = getattr(original_issignal_22d, attr_name)
        self.assertEqual(retrieved_attribute, original_attribute)
    assert_array_equal(
        retrieved_issignal_22d.times.rescale('ms').magnitude,
        original_issignal_22d.times.rescale('ms').magnitude)
    assert_array_equal(retrieved_issignal_22d.magnitude,
                       original_issignal_22d.magnitude)

    # compare one Event
    original_event_11 = original_blocks[1].segments[1].events[0]
    retrieved_event_11 = retrieved_blocks[1].segments[1].events[0]
    for attr_name in ("name", ):
        retrieved_attribute = getattr(retrieved_event_11, attr_name)
        original_attribute = getattr(original_event_11, attr_name)
        self.assertEqual(retrieved_attribute, original_attribute)
    assert_array_equal(
        retrieved_event_11.rescale('ms').magnitude,
        original_event_11.rescale('ms').magnitude)
    assert_array_equal(retrieved_event_11.labels,
                       original_event_11.labels)

    # compare one SpikeTrain
    original_spiketrain_131 = original_blocks[1].segments[1].spiketrains[1]
    retrieved_spiketrain_131 = retrieved_blocks[1].segments[1].spiketrains[
        1]
    for attr_name in ("name", "t_start", "t_stop"):
        retrieved_attribute = getattr(retrieved_spiketrain_131, attr_name)
        original_attribute = getattr(original_spiketrain_131, attr_name)
        self.assertEqual(retrieved_attribute, original_attribute)
    assert_array_equal(
        retrieved_spiketrain_131.times.rescale('ms').magnitude,
        original_spiketrain_131.times.rescale('ms').magnitude)

    # compare one Epoch (durations via allclose to tolerate float rescale)
    original_epoch_11 = original_blocks[1].segments[1].epochs[0]
    retrieved_epoch_11 = retrieved_blocks[1].segments[1].epochs[0]
    for attr_name in ("name", ):
        retrieved_attribute = getattr(retrieved_epoch_11, attr_name)
        original_attribute = getattr(original_epoch_11, attr_name)
        self.assertEqual(retrieved_attribute, original_attribute)
    assert_array_equal(
        retrieved_epoch_11.rescale('ms').magnitude,
        original_epoch_11.rescale('ms').magnitude)
    assert_allclose(
        retrieved_epoch_11.durations.rescale('ms').magnitude,
        original_epoch_11.durations.rescale('ms').magnitude)
    assert_array_equal(retrieved_epoch_11.labels,
                       original_epoch_11.labels)
    os.remove(test_file_name)
def read_segment(self, lazy=False):
    """Read the whole text file into a single Segment.

    Columns become AnalogSignal objects when the (optional) time column is
    equally spaced, and IrregularlySampledSignal objects otherwise.

    :param lazy: must be False; lazy loading is not supported.
    :return: a Segment with the created signals attached.
    """
    if lazy:
        raise NotImplementedError("lazy mode not supported")

    seg = Segment(file_origin=os.path.basename(self.filename))

    # --- load the raw 2-D array (one row per sample) ---
    if self.method == 'genfromtxt':
        sig = np.genfromtxt(self.filename, delimiter=self.delimiter,
                            usecols=self.usecols,
                            skip_header=self.skiprows, dtype='f')
        if len(sig.shape) == 1:
            sig = sig[:, np.newaxis]
    elif self.method == 'csv':
        # NOTE: the 'U' open-mode flag was removed in Python 3.11; plain
        # 'r' gives universal-newline behavior by default
        with open(self.filename, 'r') as fp:
            tab = [l for l in csv.reader(fp, delimiter=self.delimiter)]
        tab = tab[self.skiprows:]
        sig = np.array(tab, dtype='f')
        if self.usecols is not None:
            mask = np.array(self.usecols)
            sig = sig[:, mask]
    elif self.method == 'homemade':
        # context manager so the file handle is closed on all paths
        # (the original left it open)
        with open(self.filename, 'r') as fid:
            for l in range(self.skiprows):
                fid.readline()
            tab = []
            for line in fid.readlines():
                line = line.replace('\r', '')
                line = line.replace('\n', '')
                parts = line.split(self.delimiter)
                while '' in parts:
                    parts.remove('')
                tab.append(parts)
        sig = np.array(tab, dtype='f')
        if self.usecols is not None:
            mask = np.array(self.usecols)
            sig = sig[:, mask]
    else:
        # self.method is a user-supplied callable
        sig = self.method(self.filename, self.usecols)
        if not isinstance(sig, np.ndarray):
            raise TypeError("method function must return a NumPy array")
        if len(sig.shape) == 1:
            sig = sig[:, np.newaxis]
        elif len(sig.shape) != 2:
            raise ValueError(
                "method function must return a 1D or 2D NumPy array")

    # --- derive timing information ---
    if self.timecolumn is None:
        sampling_rate = self.sampling_rate
        t_start = self.t_start
    else:
        delta_t = np.diff(sig[:, self.timecolumn])
        mean_delta_t = np.mean(delta_t)
        if (delta_t.max() - delta_t.min()) / mean_delta_t < 1e-6:
            # equally spaced --> AnalogSignal
            sampling_rate = 1.0 / np.mean(np.diff(
                sig[:, self.timecolumn])) / self.time_units
        else:
            # not equally spaced --> IrregularlySampledSignal
            sampling_rate = None
        t_start = sig[0, self.timecolumn] * self.time_units

    # --- build the Neo signal objects ---
    if self.signal_group_mode == 'all-in-one':
        if self.timecolumn is not None:
            mask = list(range(sig.shape[1]))
            if self.timecolumn >= 0:
                mask.remove(self.timecolumn)
            else:  # allow negative column index
                mask.remove(sig.shape[1] + self.timecolumn)
            signal = sig[:, mask]
        else:
            signal = sig
        if sampling_rate is None:
            # BUGFIX: the time base must come from the original array
            # `sig`; `signal` has the time column stripped out, so
            # indexing it with self.timecolumn selected a data column
            irr_sig = IrregularlySampledSignal(
                sig[:, self.timecolumn] * self.time_units,
                signal * self.units,
                name='multichannel')
            seg.irregularlysampledsignals.append(irr_sig)
        else:
            ana_sig = AnalogSignal(
                signal * self.units, sampling_rate=sampling_rate,
                t_start=t_start,
                channel_index=self.usecols or np.arange(signal.shape[1]),
                name='multichannel')
            seg.analogsignals.append(ana_sig)
    else:
        # one signal per data column
        if self.timecolumn is not None and self.timecolumn < 0:
            time_col = sig.shape[1] + self.timecolumn
        else:
            time_col = self.timecolumn
        for i in range(sig.shape[1]):
            if time_col == i:
                continue
            signal = sig[:, i] * self.units
            if sampling_rate is None:
                irr_sig = IrregularlySampledSignal(
                    sig[:, time_col] * self.time_units, signal,
                    t_start=t_start, channel_index=i,
                    name='Column %d' % i)
                seg.irregularlysampledsignals.append(irr_sig)
            else:
                ana_sig = AnalogSignal(
                    signal, sampling_rate=sampling_rate, t_start=t_start,
                    channel_index=i, name='Column %d' % i)
                seg.analogsignals.append(ana_sig)

    seg.create_many_to_one_relationship()
    return seg
def test__issue_285(self):
    """Regression test: parent links on pickled children must be restored."""
    # Spiketrain
    train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
    unit = Unit()
    train.unit = unit
    unit.spiketrains.append(train)
    epoch = Epoch(np.array([0, 10, 20]), np.array([2, 2, 2]),
                  np.array(["a", "b", "c"]), units="ms")
    blk = Block()
    seg = Segment()
    seg.spiketrains.append(train)
    seg.epochs.append(epoch)
    epoch.segment = seg
    blk.segments.append(seg)

    pickler = PickleIO(filename="blk.pkl")
    pickler.write(blk)
    unpickler = PickleIO(filename="blk.pkl")
    r_seg = unpickler.read_block().segments[0]
    self.assertIsInstance(r_seg.spiketrains[0].unit, Unit)
    self.assertIsInstance(r_seg.epochs[0], Epoch)
    os.remove('blk.pkl')

    # Epoch
    epoch = Epoch(times=np.arange(0, 30, 10) * pq.s,
                  durations=[10, 5, 7] * pq.ms,
                  labels=np.array(['btn0', 'btn1', 'btn2'], dtype='S'))
    epoch.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.epochs.append(epoch)
    blk.segments.append(seg)

    pickler = PickleIO(filename="blk.pkl")
    pickler.write(blk)
    unpickler = PickleIO(filename="blk.pkl")
    r_seg = unpickler.read_block().segments[0]
    self.assertIsInstance(r_seg.epochs[0].segment, Segment)
    os.remove('blk.pkl')

    # Event
    event = Event(np.arange(0, 30, 10) * pq.s,
                  labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
    event.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.events.append(event)
    blk.segments.append(seg)

    pickler = PickleIO(filename="blk.pkl")
    pickler.write(blk)
    unpickler = PickleIO(filename="blk.pkl")
    r_seg = unpickler.read_block().segments[0]
    self.assertIsInstance(r_seg.events[0].segment, Segment)
    os.remove('blk.pkl')

    # IrregularlySampledSignal
    signal = IrregularlySampledSignal(
        [0.0, 1.23, 6.78], [1, 2, 3], units='mV', time_units='ms')
    signal.segment = Segment()
    blk = Block()
    seg = Segment()
    seg.irregularlysampledsignals.append(signal)
    blk.segments.append(seg)
    blk.segments[0].block = blk

    pickler = PickleIO(filename="blk.pkl")
    pickler.write(blk)
    unpickler = PickleIO(filename="blk.pkl")
    r_seg = unpickler.read_block().segments[0]
    self.assertIsInstance(r_seg.irregularlysampledsignals[0].segment,
                          Segment)
    os.remove('blk.pkl')
def read_tracking(self): """ Read tracking data_end """ # TODO fix for multiple .pos files if necessary # TODO store attributes, such as pixels_per_metre pos_filename = os.path.join(self._path, self._base_filename + ".pos") if not os.path.exists(pos_filename): raise IOError("'.pos' file not found:" + pos_filename) with open(pos_filename, "rb") as f: params = parse_header_and_leave_cursor(f) # print(params) sample_rate_split = params["sample_rate"].split(" ") assert (sample_rate_split[1] == "hz") sample_rate = float( sample_rate_split[0]) * pq.Hz # sample_rate 50.0 hz eeg_samples_per_position = float( params["EEG_samples_per_position"]) # TODO remove? pos_samples_count = int(params["num_pos_samples"]) bytes_per_timestamp = int(params["bytes_per_timestamp"]) bytes_per_coord = int(params["bytes_per_coord"]) timestamp_dtype = ">i" + str(bytes_per_timestamp) coord_dtype = ">i" + str(bytes_per_coord) bytes_per_pixel_count = 4 pixel_count_dtype = ">i" + str(bytes_per_pixel_count) bytes_per_pos = ( bytes_per_timestamp + 2 * self._tracked_spots_count * bytes_per_coord + 8 ) # pos_format is as follows for this file t,x1,y1,x2,y2,numpix1,numpix2. 
# read data: dtype = np.dtype([("t", (timestamp_dtype, 1)), ("coords", (coord_dtype, 1), 2 * self._tracked_spots_count), ("pixel_count", (pixel_count_dtype, 1), 2)]) data = np.fromfile(f, dtype=dtype, count=pos_samples_count) assert_end_of_data(f) time_scale = float(params["timebase"].split(" ")[0]) * pq.Hz times = data["t"].astype(float) / time_scale length_scale = float(params["pixels_per_metre"]) / pq.m coords = data["coords"].astype(float) / length_scale # positions with value 1023 are missing for i in range(2 * self._tracked_spots_count): coords[np.where(data["coords"][:, i] == 1023)] = np.nan * pq.m irr_signals = [] for i in range(self._tracked_spots_count): irr_signal = IrregularlySampledSignal( name="tracking_xy" + str(i), signal=coords[:, i * 2:i * 2 + 1 + 1], # + 1 for y + 1 for Python times=times, units="m", time_units="s", **params) irr_signals.append(irr_signal) # TODO add this signal to a channel index? return irr_signals
def _signal_da_to_neo(self, nix_da_group, lazy):
    """
    Convert a group of NIX DataArrays to a Neo signal. This method expects
    a list of data arrays that all represent the same, multidimensional
    Neo Signal object. This returns either an AnalogSignal or
    IrregularlySampledSignal.

    :param nix_da_group: a list of NIX DataArray objects
    :param lazy: if True, skip loading the signal data and only record
        its shape (set later as ``lazy_shape`` on the returned object)
    :return: a Neo Signal object, or None if the group is neither an
        analog nor an irregularly sampled signal
    """
    # Sort by name so the column order of the resulting 2D signal is
    # deterministic.
    nix_da_group = sorted(nix_da_group, key=lambda d: d.name)
    neo_attrs = self._nix_attr_to_neo(nix_da_group[0])
    metadata = nix_da_group[0].metadata
    neo_type = nix_da_group[0].type
    neo_attrs["nix_name"] = metadata.name  # use the common base name

    unit = nix_da_group[0].unit
    if lazy:
        # Empty placeholder data; the real shape is recorded separately.
        signaldata = pq.Quantity(np.empty(0), unit)
        lazy_shape = (len(nix_da_group[0]), len(nix_da_group))
    else:
        # Stack the one-dimensional arrays into columns of a 2D signal.
        signaldata = np.array([d[:] for d in nix_da_group]).transpose()
        signaldata = pq.Quantity(signaldata, unit)
        lazy_shape = None
    timedim = self._get_time_dimension(nix_da_group[0])
    if (neo_type == "neo.analogsignal"
            or timedim.dimension_type == nix.DimensionType.Sample):
        if lazy:
            # Placeholder timing; only the units are meaningful here.
            sampling_period = pq.Quantity(1, timedim.unit)
            t_start = pq.Quantity(0, timedim.unit)
        else:
            # Unit overrides stored in the metadata take precedence over
            # the units attached to the time dimension itself.
            if "sampling_interval.units" in metadata.props:
                sample_units = metadata["sampling_interval.units"]
            else:
                sample_units = timedim.unit
            sampling_period = pq.Quantity(timedim.sampling_interval,
                                          sample_units)
            if "t_start.units" in metadata.props:
                tsunits = metadata["t_start.units"]
            else:
                tsunits = timedim.unit
            t_start = pq.Quantity(timedim.offset, tsunits)
        neo_signal = AnalogSignal(signal=signaldata,
                                  sampling_period=sampling_period,
                                  t_start=t_start, **neo_attrs)
    elif (neo_type == "neo.irregularlysampledsignal"
            or timedim.dimension_type == nix.DimensionType.Range):
        if lazy:
            times = pq.Quantity(np.empty(0), timedim.unit)
        else:
            times = pq.Quantity(timedim.ticks, timedim.unit)
        neo_signal = IrregularlySampledSignal(signal=signaldata,
                                              times=times, **neo_attrs)
    else:
        return None
    # Map every source DataArray id to the single Neo object it is part of.
    for da in nix_da_group:
        self._object_map[da.id] = neo_signal
    if lazy_shape:
        neo_signal.lazy_shape = lazy_shape
    return neo_signal
def test__cut_block_by_epochs(self):
    """
    cut_block_by_epochs() must create one new segment per interval of the
    Epoch matching the given properties, slicing (and, with
    ``reset_time=True``, time-shifting) every data object of the original
    segment. Both modes are exercised on a freshly built block.
    """
    # Epoch with annotation pick='me' — this is the one the cut selects.
    epoch = Epoch([0.5, 10.0, 25.2] * pq.s,
                  durations=[5.1, 4.8, 5.0] * pq.s, t_start=.1 * pq.s)
    epoch.annotate(epoch_type='a', pick='me')
    epoch.array_annotate(trial_id=[1, 2, 3])

    # Second epoch without the matching property; it is only sliced, not
    # used for cutting.
    epoch2 = Epoch([0.6, 9.5, 16.8, 34.1] * pq.s,
                   durations=[4.5, 4.8, 5.0, 5.0] * pq.s, t_start=.1 * pq.s)
    epoch2.annotate(epoch_type='b')
    epoch2.array_annotate(trial_id=[1, 2, 3, 4])

    event = Event(times=[0.5, 10.0, 25.2] * pq.s, t_start=.1 * pq.s)
    event.annotate(event_type='trial start')
    event.array_annotate(trial_id=[1, 2, 3])

    anasig = AnalogSignal(np.arange(50.0) * pq.mV, t_start=.1 * pq.s,
                          sampling_rate=1.0 * pq.Hz)
    irrsig = IrregularlySampledSignal(signal=np.arange(50.0) * pq.mV,
                                      times=anasig.times,
                                      t_start=.1 * pq.s)
    st = SpikeTrain(
        np.arange(0.5, 50, 7) * pq.s,
        t_start=.1 * pq.s, t_stop=50.0 * pq.s,
        waveforms=np.array(
            [[[0., 1.], [0.1, 1.1]],
             [[2., 3.], [2.1, 3.1]],
             [[4., 5.], [4.1, 5.1]],
             [[6., 7.], [6.1, 7.1]],
             [[8., 9.], [8.1, 9.1]],
             [[12., 13.], [12.1, 13.1]],
             [[14., 15.], [14.1, 15.1]],
             [[16., 17.], [16.1, 17.1]]]) * pq.mV,
        array_annotations={'spikenum': np.arange(1, 9)})

    seg = Segment()
    seg2 = Segment(name='NoCut')  # segment without epochs
    seg.epochs = [epoch, epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg, seg2]
    block.create_many_to_one_relationship()

    # test without resetting the time
    cut_block_by_epochs(block, properties={'pick': 'me'})

    assert_neo_object_is_compliant(block)
    # one cut segment per interval of the matching epoch (3 intervals)
    self.assertEqual(len(block.segments), 3)

    for epoch_idx in range(len(epoch)):
        self.assertEqual(len(block.segments[epoch_idx].events), 1)
        self.assertEqual(len(block.segments[epoch_idx].spiketrains), 1)
        self.assertEqual(len(block.segments[epoch_idx].analogsignals), 1)
        self.assertEqual(
            len(block.segments[epoch_idx].irregularlysampledsignals), 1)
        # only the first cut window overlaps intervals of both epochs
        if epoch_idx != 0:
            self.assertEqual(len(block.segments[epoch_idx].epochs), 1)
        else:
            self.assertEqual(len(block.segments[epoch_idx].epochs), 2)

        # each data object must equal a direct time_slice over the
        # corresponding epoch interval
        assert_same_attributes(
            block.segments[epoch_idx].spiketrains[0],
            st.time_slice(t_start=epoch.times[epoch_idx],
                          t_stop=epoch.times[epoch_idx]
                          + epoch.durations[epoch_idx]))
        assert_same_attributes(
            block.segments[epoch_idx].analogsignals[0],
            anasig.time_slice(t_start=epoch.times[epoch_idx],
                              t_stop=epoch.times[epoch_idx]
                              + epoch.durations[epoch_idx]))
        assert_same_attributes(
            block.segments[epoch_idx].irregularlysampledsignals[0],
            irrsig.time_slice(t_start=epoch.times[epoch_idx],
                              t_stop=epoch.times[epoch_idx]
                              + epoch.durations[epoch_idx]))
        assert_same_attributes(
            block.segments[epoch_idx].events[0],
            event.time_slice(t_start=epoch.times[epoch_idx],
                             t_stop=epoch.times[epoch_idx]
                             + epoch.durations[epoch_idx]))
    assert_same_attributes(
        block.segments[0].epochs[0],
        epoch.time_slice(t_start=epoch.times[0],
                         t_stop=epoch.times[0] + epoch.durations[0]))
    assert_same_attributes(
        block.segments[0].epochs[1],
        epoch2.time_slice(t_start=epoch.times[0],
                          t_stop=epoch.times[0] + epoch.durations[0]))

    # rebuild the block from scratch for the second mode
    seg = Segment()
    seg2 = Segment(name='NoCut')
    seg.epochs = [epoch, epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg, seg2]
    block.create_many_to_one_relationship()

    # test with resetting the time
    cut_block_by_epochs(block, properties={'pick': 'me'}, reset_time=True)

    assert_neo_object_is_compliant(block)
    self.assertEqual(len(block.segments), 3)

    for epoch_idx in range(len(epoch)):
        self.assertEqual(len(block.segments[epoch_idx].events), 1)
        self.assertEqual(len(block.segments[epoch_idx].spiketrains), 1)
        self.assertEqual(len(block.segments[epoch_idx].analogsignals), 1)
        self.assertEqual(
            len(block.segments[epoch_idx].irregularlysampledsignals), 1)
        if epoch_idx != 0:
            self.assertEqual(len(block.segments[epoch_idx].epochs), 1)
        else:
            self.assertEqual(len(block.segments[epoch_idx].epochs), 2)

        # with reset_time the expected result is a time_shift to t=0
        # followed by a slice over the epoch duration
        assert_same_attributes(
            block.segments[epoch_idx].spiketrains[0],
            st.time_shift(-epoch.times[epoch_idx]).time_slice(
                t_start=0 * pq.s, t_stop=epoch.durations[epoch_idx]))

        anasig_target = anasig.time_shift(-epoch.times[epoch_idx])
        anasig_target = anasig_target.time_slice(
            t_start=0 * pq.s, t_stop=epoch.durations[epoch_idx])
        assert_same_attributes(block.segments[epoch_idx].analogsignals[0],
                               anasig_target)

        irrsig_target = irrsig.time_shift(-epoch.times[epoch_idx])
        irrsig_target = irrsig_target.time_slice(
            t_start=0 * pq.s, t_stop=epoch.durations[epoch_idx])
        assert_same_attributes(
            block.segments[epoch_idx].irregularlysampledsignals[0],
            irrsig_target)

        assert_same_attributes(
            block.segments[epoch_idx].events[0],
            event.time_shift(-epoch.times[epoch_idx]).time_slice(
                t_start=0 * pq.s, t_stop=epoch.durations[epoch_idx]))
    assert_same_attributes(
        block.segments[0].epochs[0],
        epoch.time_shift(-epoch.times[0]).time_slice(
            t_start=0 * pq.s, t_stop=epoch.durations[0]))
    assert_same_attributes(
        block.segments[0].epochs[1],
        epoch2.time_shift(-epoch.times[0]).time_slice(
            t_start=0 * pq.s, t_stop=epoch.durations[0]))
def test__time_slice(self):
    """
    Segment.time_slice() must slice every contained data object to the
    requested window, both without and with ``reset_time=True``, and must
    also work when the segment contains lazy proxy objects (loading them
    to full objects in the sliced result).
    """
    time_slice = [.5, 5.6] * pq.s

    epoch2 = Epoch([0.6, 9.5, 16.8, 34.1] * pq.s,
                   durations=[4.5, 4.8, 5.0, 5.0] * pq.s, t_start=.1 * pq.s)
    epoch2.annotate(epoch_type='b')
    epoch2.array_annotate(trial_id=[1, 2, 3, 4])

    event = Event(times=[0.5, 10.0, 25.2] * pq.s, t_start=.1 * pq.s)
    event.annotate(event_type='trial start')
    event.array_annotate(trial_id=[1, 2, 3])

    anasig = AnalogSignal(np.arange(50.0) * pq.mV, t_start=.1 * pq.s,
                          sampling_rate=1.0 * pq.Hz)
    irrsig = IrregularlySampledSignal(signal=np.arange(50.0) * pq.mV,
                                      times=anasig.times,
                                      t_start=.1 * pq.s)
    st = SpikeTrain(
        np.arange(0.5, 50, 7) * pq.s,
        t_start=.1 * pq.s, t_stop=50.0 * pq.s,
        waveforms=np.array(
            [[[0., 1.], [0.1, 1.1]],
             [[2., 3.], [2.1, 3.1]],
             [[4., 5.], [4.1, 5.1]],
             [[6., 7.], [6.1, 7.1]],
             [[8., 9.], [8.1, 9.1]],
             [[12., 13.], [12.1, 13.1]],
             [[14., 15.], [14.1, 15.1]],
             [[16., 17.], [16.1, 17.1]]]) * pq.mV,
        array_annotations={'spikenum': np.arange(1, 9)})

    seg = Segment()
    seg.epochs = [epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test without resetting the time
    sliced = seg.time_slice(time_slice[0], time_slice[1])

    assert_neo_object_is_compliant(sliced)
    self.assertEqual(len(sliced.events), 1)
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)
    self.assertEqual(len(sliced.irregularlysampledsignals), 1)
    self.assertEqual(len(sliced.epochs), 1)

    # each contained object must equal its own direct time_slice
    assert_same_attributes(
        sliced.spiketrains[0],
        st.time_slice(t_start=time_slice[0], t_stop=time_slice[1]))
    assert_same_attributes(
        sliced.analogsignals[0],
        anasig.time_slice(t_start=time_slice[0], t_stop=time_slice[1]))
    assert_same_attributes(
        sliced.irregularlysampledsignals[0],
        irrsig.time_slice(t_start=time_slice[0], t_stop=time_slice[1]))
    assert_same_attributes(
        sliced.events[0],
        event.time_slice(t_start=time_slice[0], t_stop=time_slice[1]))
    assert_same_attributes(
        sliced.epochs[0],
        epoch2.time_slice(t_start=time_slice[0], t_stop=time_slice[1]))

    # rebuild the segment for the reset_time mode
    seg = Segment()
    seg.epochs = [epoch2]
    seg.events = [event]
    seg.analogsignals = [anasig]
    seg.irregularlysampledsignals = [irrsig]
    seg.spiketrains = [st]

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test with resetting the time
    sliced = seg.time_slice(time_slice[0], time_slice[1], reset_time=True)

    assert_neo_object_is_compliant(sliced)
    self.assertEqual(len(sliced.events), 1)
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)
    self.assertEqual(len(sliced.irregularlysampledsignals), 1)
    self.assertEqual(len(sliced.epochs), 1)

    # with reset_time the expected result is a shift to t=0 followed by
    # a slice over the window length
    assert_same_attributes(
        sliced.spiketrains[0],
        st.time_shift(-time_slice[0]).time_slice(
            t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0]))

    anasig_target = anasig.copy()
    anasig_target = anasig_target.time_shift(-time_slice[0]).time_slice(
        t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0])
    assert_same_attributes(sliced.analogsignals[0], anasig_target)

    irrsig_target = irrsig.copy()
    irrsig_target = irrsig_target.time_shift(-time_slice[0]).time_slice(
        t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0])
    assert_same_attributes(sliced.irregularlysampledsignals[0],
                           irrsig_target)

    assert_same_attributes(
        sliced.events[0],
        event.time_shift(-time_slice[0]).time_slice(
            t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0]))
    assert_same_attributes(
        sliced.epochs[0],
        epoch2.time_shift(-time_slice[0]).time_slice(
            t_start=0 * pq.s, t_stop=time_slice[1] - time_slice[0]))

    # build a segment containing only proxy (lazy) objects
    seg = Segment()

    reader = ExampleRawIO(filename='my_filename.fake')
    reader.parse_header()

    proxy_anasig = AnalogSignalProxy(rawio=reader, stream_index=0,
                                     inner_stream_channels=None,
                                     block_index=0, seg_index=0)
    seg.analogsignals.append(proxy_anasig)

    proxy_st = SpikeTrainProxy(rawio=reader, spike_channel_index=0,
                               block_index=0, seg_index=0)
    seg.spiketrains.append(proxy_st)

    proxy_event = EventProxy(rawio=reader, event_channel_index=0,
                             block_index=0, seg_index=0)
    seg.events.append(proxy_event)

    proxy_epoch = EpochProxy(rawio=reader, event_channel_index=1,
                             block_index=0, seg_index=0)
    proxy_epoch.annotate(pick='me')
    seg.epochs.append(proxy_epoch)

    # fully load the proxies to compute the expected slices
    loaded_epoch = proxy_epoch.load()
    loaded_event = proxy_event.load()
    loaded_st = proxy_st.load()
    loaded_anasig = proxy_anasig.load()

    block = Block()
    block.segments = [seg]
    block.create_many_to_one_relationship()

    # test with proxy objects
    sliced = seg.time_slice(time_slice[0], time_slice[1])

    assert_neo_object_is_compliant(sliced)

    sliced_event = loaded_event.time_slice(t_start=time_slice[0],
                                           t_stop=time_slice[1])
    # the event may have no entries inside the window
    has_event = len(sliced_event) > 0

    sliced_anasig = loaded_anasig.time_slice(t_start=time_slice[0],
                                             t_stop=time_slice[1])

    sliced_st = loaded_st.time_slice(t_start=time_slice[0],
                                     t_stop=time_slice[1])

    self.assertEqual(len(sliced.events), int(has_event))
    self.assertEqual(len(sliced.spiketrains), 1)
    self.assertEqual(len(sliced.analogsignals), 1)

    # slicing a proxy must yield a fully loaded object
    self.assertTrue(isinstance(sliced.spiketrains[0], SpikeTrain))
    assert_same_attributes(sliced.spiketrains[0], sliced_st)

    self.assertTrue(isinstance(sliced.analogsignals[0], AnalogSignal))
    assert_same_attributes(sliced.analogsignals[0], sliced_anasig)

    if has_event:
        self.assertTrue(isinstance(sliced.events[0], Event))
        assert_same_attributes(sliced.events[0], sliced_event)
def _handle_timeseries(self, name, timeseries):
    """
    Build a Neo object from an HDF5 "TimeSeries" group.

    Depending on the group's contents this returns:

    * an Epoch   (string data with a ``durations`` dataset),
    * an Event   (string data without durations),
    * an AnalogSignal (numeric data with ``starting_time`` metadata),
    * an IrregularlySampledSignal (numeric data with ``timestamps``).

    :param name: name to assign to the resulting Neo object
    :param timeseries: the HDF5 group holding the time series
    :raises Exception: if the group carries neither ``starting_time``
        nor ``timestamps`` information for numeric data
    """
    # todo: check timeseries.attrs.get('schema_id')
    # todo: handle timeseries.attrs.get('source')
    subtype = timeseries.attrs['ancestry'][-1]

    data_group = timeseries.get('data')
    dtype = data_group.dtype
    if self._lazy:
        data = np.array((), dtype=dtype)
        # The dataset's shape is available as metadata; asking the group
        # directly avoids reading the entire array into memory (the
        # previous `data_group.value.shape` loaded all the data first).
        lazy_shape = data_group.shape
    else:
        data = data_group.value

    if dtype.type is np.string_:
        # string data -> Epoch or Event (times are in seconds)
        if self._lazy:
            times = np.array(())
        else:
            times = timeseries.get('timestamps')
        durations = timeseries.get('durations')
        if durations:
            # Epoch
            if self._lazy:
                durations = np.array(())
            obj = Epoch(times=times,
                        durations=durations,
                        labels=data,
                        units='second')
        else:
            # Event
            obj = Event(times=times,
                        labels=data,
                        units='second')
    else:
        units = get_units(data_group)
        if 'starting_time' in timeseries:
            # AnalogSignal: regular sampling described by a start time
            # and a sampling rate
            sampling_metadata = timeseries.get('starting_time')
            t_start = sampling_metadata.value * pq.s
            sampling_rate = sampling_metadata.attrs.get('rate') * pq.Hz
            assert sampling_metadata.attrs.get('unit') == 'Seconds'
            # todo: handle data.attrs['resolution']
            obj = AnalogSignal(data,
                               units=units,
                               sampling_rate=sampling_rate,
                               t_start=t_start,
                               name=name)
        elif 'timestamps' in timeseries:
            # IrregularlySampledSignal: explicit per-sample timestamps
            if self._lazy:
                time_data = np.array(())
            else:
                time_data = timeseries.get('timestamps')
                assert time_data.attrs.get('unit') == 'Seconds'
            obj = IrregularlySampledSignal(time_data.value,
                                           data,
                                           units=units,
                                           time_units=pq.second)
        else:
            raise Exception("Timeseries group does not contain sufficient "
                            "time information")
    if self._lazy:
        obj.lazy_shape = lazy_shape
    return obj