Example no. 1
    def test_annotations_special_cases(self):
        # Special cases for annotations: empty list, list of strings,
        # multidimensional lists/arrays
        # These are handled differently on read, so we test them on a block
        # instead of just checking the property writer method

        # empty list
        wblock = Block("block with empty list", an_empty_list=list())
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with empty list")
        self.assertEqual(rblock.annotations["an_empty_list"], list())

        # empty tuple (gets read out as list)
        wblock = Block("block with empty tuple", an_empty_tuple=tuple())
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with empty tuple")
        self.assertEqual(rblock.annotations["an_empty_tuple"], list())

        # list of strings
        losval = ["one", "two", "one million"]
        wblock = Block("block with list of strings", los=losval)
        self.writer.write_block(wblock)
        rblock = self.writer.read_block(neoname="block with list of strings")
        self.assertEqual(rblock.annotations["los"], losval)
Example no. 2
    def test_time_slice_None(self):
        time_slices = [(None, 5.0 * pq.s), (5.0 * pq.s, None), (None, None)]

        anasig = AnalogSignal(np.arange(50.0) * pq.mV,
                              sampling_rate=1.0 * pq.Hz)
        seg = Segment()
        seg.analogsignals = [anasig]

        block = Block()
        block.segments = [seg]
        block.create_many_to_one_relationship()

        # test without resetting the time
        for t_start, t_stop in time_slices:
            sliced = seg.time_slice(t_start, t_stop)

            assert_neo_object_is_compliant(sliced)
            self.assertEqual(len(sliced.analogsignals), 1)

            exp_t_start, exp_t_stop = t_start, t_stop
            if exp_t_start is None:
                exp_t_start = seg.t_start
            if exp_t_stop is None:
                exp_t_stop = seg.t_stop

            self.assertEqual(exp_t_start, sliced.t_start)
            self.assertEqual(exp_t_stop, sliced.t_stop)
Example no. 3
    def read_block(self, lazy=False, group=None, reader=None):
        """
        Read a Block from the file

        :param lazy: Enables lazy reading
        :param group: HDF5 Group representing the block in NSDF model tree (optional)
        :param reader: NSDFReader instance (optional)
        :return: Read block
        """
        assert not lazy, 'This IO does not support lazy mode'

        block = Block()
        group, reader = self._select_first_container(group, reader, 'block')

        if group is None:
            return None

        attrs = group.attrs

        self._read_block_children(block, group, reader)
        block.create_many_to_one_relationship()

        self._read_container_metadata(attrs, block)

        return block
Example no. 4
    def test_block_write(self):
        block = Block(name=self.rword(),
                      description=self.rsentence())
        self.write_and_compare([block])

        block.annotate(**self.rdict(5))
        self.write_and_compare([block])
Example no. 5
    def read_block(self, lazy=False, cascade=True):
        block = Block(file_origin=self.filename)
        if cascade:
            block.segments.append(self.read_segment(lazy=lazy,
                                                    cascade=cascade))
            block.segments[-1].block = block
        return block
Example no. 6
    def read_block(self, lazy=False, cascade=True, channel_index=None):
        """
        Arguments:
            channel_index: can be int, iterable or None to select one,
                many or all channel(s)
        """

        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._filename)
            blk.segments += [seg]

            if channel_index is not None:
                if isinstance(channel_index, int):
                    channel_index = [channel_index]
                channel_index = np.array(channel_index)
            else:
                channel_index = np.arange(0, self._attrs['shape'][1])

            chx = ChannelIndex(name='all channels', index=channel_index)
            blk.channel_indexes.append(chx)

            ana = self.read_analogsignal(channel_index=channel_index,
                                         lazy=lazy,
                                         cascade=cascade)
            ana.channel_index = chx
            seg.duration = (self._attrs['shape'][0] /
                            self._attrs['kwik']['sample_rate']) * pq.s

            # neo.tools.populate_RecordingChannel(blk)
        blk.create_many_to_one_relationship()
        return blk
Example no. 7
    def test_spiketrain_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        spiketrain = SpikeTrain(times=[3, 4, 5] * pq.s,
                                t_stop=10.0,
                                name="spikes!",
                                description="sssssspikes")
        seg.spiketrains.append(spiketrain)
        self.write_and_compare([block])

        waveforms = self.rquant((3, 5, 10), pq.mV)
        spiketrain = SpikeTrain(times=[1, 1.1, 1.2] * pq.ms,
                                t_stop=1.5 * pq.s,
                                name="spikes with wf",
                                description="spikes for waveform test",
                                waveforms=waveforms)

        seg.spiketrains.append(spiketrain)
        self.write_and_compare([block])

        spiketrain.left_sweep = np.random.random(10) * pq.ms
        self.write_and_compare([block])

        spiketrain.left_sweep = pq.Quantity(-10, "ms")
        self.write_and_compare([block])
Example no. 8
    def test__construct_subsegment_by_unit(self):
        nb_seg = 3
        nb_unit = 7
        unit_with_sig = [0, 2, 5]
        signal_types = ['Vm', 'Conductances']
        sig_len = 100

        # recording channel groups
        rcgs = [RecordingChannelGroup(name='Vm',
                                      channel_indexes=unit_with_sig),
                RecordingChannelGroup(name='Conductance',
                                      channel_indexes=unit_with_sig)]

        # units
        all_unit = []
        for u in range(nb_unit):
            un = Unit(name='Unit #%d' % u, channel_indexes=[u])
            all_unit.append(un)

        bl = Block()
        for s in range(nb_seg):
            seg = Segment(name='Simulation %s' % s)
            bl.segments.append(seg)  # attach the segment to the block
            for j in range(nb_unit):
                st = SpikeTrain([1, 2, 3], units='ms', t_start=0., t_stop=10)
                st.unit = all_unit[j]
                seg.spiketrains.append(st)  # attach the train to the segment

            for t in signal_types:
                anasigarr = AnalogSignalArray(
                    np.zeros((sig_len, len(unit_with_sig))), units='nA',
                    sampling_rate=1000. * pq.Hz,
                    channel_indexes=unit_with_sig)
                seg.analogsignalarrays.append(anasigarr)

        # what you want
        subseg = seg.construct_subsegment_by_unit(all_unit[:4])
Example no. 9
    def read_block(self,
                   gid_list=None,
                   time_unit=pq.ms,
                   t_start=None,
                   t_stop=None,
                   sampling_period=None,
                   id_column_dat=0,
                   time_column_dat=1,
                   value_columns_dat=2,
                   id_column_gdf=0,
                   time_column_gdf=1,
                   value_types=None,
                   value_units=None,
                   lazy=False,
                   cascade=True):
        seg = self.read_segment(gid_list, time_unit, t_start, t_stop,
                                sampling_period, id_column_dat,
                                time_column_dat, value_columns_dat,
                                id_column_gdf, time_column_gdf, value_types,
                                value_units, lazy, cascade)
        blk = Block(file_origin=seg.file_origin,
                    file_datetime=seg.file_datetime)
        blk.segments.append(seg)
        seg.block = blk
        return blk
Example no. 10
    def test_roundtrip_with_json_metadata(self):
        sample_data = np.random.uniform(size=(200, 3))
        filename = "test_roundtrip_with_json_metadata.txt"
        metadata_filename = "test_roundtrip_with_json_metadata_about.json"
        signal1 = AnalogSignal(sample_data,
                               units="pA",
                               sampling_rate=2 * pq.kHz)
        seg1 = Segment()
        block1 = Block()
        seg1.analogsignals.append(signal1)
        seg1.block = block1
        block1.segments.append(seg1)

        iow = AsciiSignalIO(filename, metadata_filename=metadata_filename)
        iow.write_block(block1)
        self.assertTrue(os.path.exists(metadata_filename))

        ior = AsciiSignalIO(filename)
        block2 = ior.read_block()
        assert len(block2.segments[0].analogsignals) == 3
        signal2 = block2.segments[0].analogsignals[1]

        assert_array_almost_equal(signal1.magnitude[:, 1],
                                  signal2.magnitude.reshape(-1),
                                  decimal=7)
        self.assertEqual(signal1.units, signal2.units)
        self.assertEqual(signal1.sampling_rate, signal2.sampling_rate)
        assert_array_equal(signal1.times, signal2.times)

        os.remove(filename)
        os.remove(metadata_filename)
Example no. 11
    def _read_block(self, node):
        attributes = self._get_standard_attributes(node)
        if "index" in attributes:
            attributes["index"] = int(attributes["index"])
        block = Block(**attributes)

        for name, child_node in node['segments'].items():
            if "Segment" in name:
                block.segments.append(
                    self._read_segment(child_node, parent=block))

        if len(node['recordingchannelgroups']) > 0:
            for name, child_node in node['recordingchannelgroups'].items():
                if "RecordingChannelGroup" in name:
                    block.channel_indexes.append(
                        self._read_recordingchannelgroup(child_node,
                                                         parent=block))
            self._resolve_channel_indexes(block)
        elif self.merge_singles:
            # if no RecordingChannelGroups are defined, merging
            # takes place here.
            for segment in block.segments:
                if hasattr(segment, 'unmerged_analogsignals'):
                    segment.analogsignals.extend(
                        self._merge_data_objects(
                            segment.unmerged_analogsignals))
                    del segment.unmerged_analogsignals
                if hasattr(segment, 'unmerged_irregularlysampledsignals'):
                    segment.irregularlysampledsignals.extend(
                        self._merge_data_objects(
                            segment.unmerged_irregularlysampledsignals))
                    del segment.unmerged_irregularlysampledsignals

        return block
Example no. 12
    def read_block(self, lazy=False, cascade=True, **kwargs):
        self._file = h5py.File(self.filename, 'r')
        self._lazy = lazy
        file_access_dates = self._file.get('file_create_date')
        if file_access_dates is None:
            file_creation_date = None
        else:
            file_access_dates = [parse_datetime(dt) for dt in file_access_dates]
            file_creation_date = file_access_dates[0]
        identifier = self._file.get('identifier').value
        if identifier == '_neo':  # automatically generated name used if block.name is None
            identifier = None
        description = self._file.get('session_description').value
        if description == "no description":
            description = None
        block = Block(name=identifier,
                      description=description,
                      file_origin=self.filename,
                      file_datetime=file_creation_date,
                      rec_datetime=parse_datetime(self._file.get('session_start_time').value),
                      # index=?,
                      nwb_version=self._file.get('nwb_version').value,
                      file_access_dates=file_access_dates,
                      file_read_log='')
        if cascade:
            self._handle_general_group(block)
            self._handle_epochs_group(block)
            self._handle_acquisition_group(block)
            self._handle_stimulus_group(block)
            self._handle_processing_group(block)
            self._handle_analysis_group(block)
        self._lazy = False
        return block
Example no. 13
    def read_block(self, lazy=False, cascade=True):
        # TODO read block
        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._absolute_folder_path)

            for name in self._processing:
                if name == "Position":
                    seg.irregularlysampledsignals += self.read_tracking(
                        path="")
                if name == "LFP":
                    seg.analogsignals += self.read_analogsignal(path="")
                if name == "EventWaveform":
                    seg.spiketrains += self.read_spiketrain(path="")

                for key in self._processing[name]:
                    if key == "Position":
                        seg.irregularlysampledsignals += self.read_tracking(
                            path=name)
                    if key == "LFP":
                        seg.analogsignals += self.read_analogsignal(path=name)
                    if key == "EventWaveform":
                        seg.spiketrains += self.read_spiketrain(path=name)

            #blk.channel_indexes = self._channel_indexes

            blk.segments += [seg]

            # TODO add duration
            #seg.duration = self._duration

            # TODO May need to "populate_RecordingChannel"

        #blk.create_many_to_one_relationship()
        return blk
Example no. 14
    def test_annotations(self):
        self.testfilename = self.get_filename_path('nixio_fr_ann.nix')
        with NixIO(filename=self.testfilename, mode='ow') as io:
            annotations = {'my_custom_annotation': 'hello block'}
            bl = Block(**annotations)
            annotations = {'something': 'hello hello000'}
            seg = Segment(**annotations)
            an = AnalogSignal([[1, 2, 3], [4, 5, 6]], units='V',
                              sampling_rate=1 * pq.Hz)
            an.annotations['ansigrandom'] = 'hello chars'
            sp = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
            sp.annotations['railway'] = 'hello train'
            ev = Event(np.arange(0, 30, 10) * pq.Hz,
                       labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
            ev.annotations['venue'] = 'hello event'
            ev2 = Event(np.arange(0, 30, 10) * pq.Hz,
                        labels=np.array(['trig0', 'trig1', 'trig2'], dtype='S'))
            ev2.annotations['evven'] = 'hello ev'
            seg.spiketrains.append(sp)
            seg.events.append(ev)
            seg.events.append(ev2)
            seg.analogsignals.append(an)
            bl.segments.append(seg)
            io.write_block(bl)
            io.close()
        with NixIOfr(filename=self.testfilename) as frio:
            frbl = frio.read_block()
            assert 'my_custom_annotation' in frbl.annotations
            assert 'something' in frbl.segments[0].annotations
            # assert 'ansigrandom' in frbl.segments[0].analogsignals[0].annotations
            assert 'railway' in frbl.segments[0].spiketrains[0].annotations
            assert 'venue' in frbl.segments[0].events[0].annotations
            assert 'evven' in frbl.segments[0].events[1].annotations
        os.remove(self.testfilename)
Example no. 15
    def read_block(self, lazy=False):
        assert not lazy, 'This IO does not support lazy mode'

        block = Block(file_origin=str(self.filename))
        block.segments.append(self.read_segment(lazy=lazy))
        block.segments[-1].block = block
        return block
Example no. 16
def random_block():
    block = Block(
        name=random_string(10),
        description=random_string(100),
        file_origin=random_string(20),
        file_datetime=random_datetime(),
        rec_datetime=random_datetime(),
        **random_annotations(6)
    )
    n_seg = random.randint(0, 5)
    for i in range(n_seg):
        seg = random_segment()
        block.segments.append(seg)
        seg.block = block
    children = list(block.data_children_recur)
    views = []
    PROB_SIGNAL_HAS_VIEW = 0.5
    for child in children:
        if isinstance(child, (AnalogSignal, IrregularlySampledSignal)):
            if np.random.random_sample() < PROB_SIGNAL_HAS_VIEW:
                chv = random_channelview(child)
                if chv:
                    views.append(chv)
    children.extend(views)
    n_groups = random.randint(0, 5)
    for i in range(n_groups):
        group = random_group(children)
        if group:
            block.groups.append(group)
            group.block = block
            children.append(group)  # this can give us nested groups
    return block
Example no. 17
    def read_block(self,
                   lazy=False,
                   cascade=True,
                   read_waveforms=True,
                   elphys_directory_name='electrophysiology'):
        '''
        Read the exdir directory into a neo Block with a single Segment.
        '''
        blk = Block(file_origin=self._absolute_directory_path,
                    **self._exdir_directory.attrs.to_dict())
        seg = Segment(name='Segment #0', index=0)
        blk.segments.append(seg)
        if cascade:
            for group in self._epochs.values():
                epo = self.read_epoch(group.name, cascade, lazy)
                seg.epochs.append(epo)
            for channel_group in self._processing[
                    elphys_directory_name].values():
                chx = self.read_channelindex(channel_group.name,
                                             cascade=cascade,
                                             lazy=lazy,
                                             read_waveforms=read_waveforms)
                blk.channel_indexes.append(chx)
                seg.analogsignals.extend(chx.analogsignals)
                seg.spiketrains.extend(
                    [sptr for unit in chx.units for sptr in unit.spiketrains])
        return blk
Example no. 18
    def read_block(self, lazy=False, cascade=True):
        """

        """

        blk = Block()
        if cascade:
            seg = Segment(file_origin=self._absolute_filename)

            blk.channel_indexes = self._channel_indexes

            blk.segments += [seg]

            seg.analogsignals = self.read_analogsignal(lazy=lazy,
                                                       cascade=cascade)
            try:
                seg.irregularlysampledsignals = self.read_tracking()
            except Exception as e:
                print('Warning: unable to read tracking')
                print(e)
            seg.spiketrains = self.read_spiketrain()

            # TODO Call all other read functions

            seg.duration = self._duration

            # TODO May need to "populate_RecordingChannel"

            # spiketrain = self.read_spiketrain()

            # seg.spiketrains.append()

        blk.create_many_to_one_relationship()
        return blk
Example no. 19
    def test_segment_write(self):
        block = Block(name=self.rword())
        segment = Segment(name=self.rword(), description=self.rword())
        block.segments.append(segment)
        self.write_and_compare([block])

        segment.annotate(**self.rdict(2))
        self.write_and_compare([block])
Example no. 20
    def _block_to_neo(self, nix_block):
        neo_attrs = self._nix_attr_to_neo(nix_block)
        neo_block = Block(**neo_attrs)
        neo_block.rec_datetime = datetime.fromtimestamp(
            nix_block.created_at)
        self._neo_map[nix_block.name] = neo_block
        return neo_block
Example no. 21
    def test__children(self):
        blk = Block(name='block1')
        blk.segments = [self.seg1]
        blk.create_many_to_one_relationship(force=True)
        assert_neo_object_is_compliant(self.seg1)
        assert_neo_object_is_compliant(blk)

        childobjs = ('AnalogSignal', 'Epoch', 'Event',
                     'IrregularlySampledSignal', 'SpikeTrain', 'ImageSequence')
        childconts = ('analogsignals', 'epochs', 'events',
                      'irregularlysampledsignals', 'spiketrains',
                      'imagesequences')
        self.assertEqual(self.seg1._container_child_objects, ())
        self.assertEqual(self.seg1._data_child_objects, childobjs)
        self.assertEqual(self.seg1._single_parent_objects, ('Block', ))
        self.assertEqual(self.seg1._multi_child_objects, ())
        self.assertEqual(self.seg1._multi_parent_objects, ())
        self.assertEqual(self.seg1._child_properties, ())

        self.assertEqual(self.seg1._single_child_objects, childobjs)
        self.assertEqual(self.seg1._container_child_containers, ())
        self.assertEqual(self.seg1._data_child_containers, childconts)
        self.assertEqual(self.seg1._single_child_containers, childconts)
        self.assertEqual(self.seg1._single_parent_containers, ('block', ))
        self.assertEqual(self.seg1._multi_child_containers, ())
        self.assertEqual(self.seg1._multi_parent_containers, ())

        self.assertEqual(self.seg1._child_objects, childobjs)
        self.assertEqual(self.seg1._child_containers, childconts)
        self.assertEqual(self.seg1._parent_objects, ('Block', ))
        self.assertEqual(self.seg1._parent_containers, ('block', ))

        totchildren = (
            self.nchildren * 2 +  # epoch/event
            self.nchildren +  # analogsignal
            self.nchildren**2 +  # spiketrain
            self.nchildren +  # irregsignal
            self.nchildren)  # imagesequence
        self.assertEqual(len(self.seg1._single_children), totchildren)
        self.assertEqual(len(self.seg1.data_children), totchildren)
        self.assertEqual(len(self.seg1.children), totchildren)
        self.assertEqual(len(self.seg1.data_children_recur), totchildren)
        self.assertEqual(len(self.seg1.children_recur), totchildren)

        self.assertEqual(len(self.seg1._multi_children), 0)
        self.assertEqual(len(self.seg1.container_children), 0)
        self.assertEqual(len(self.seg1.container_children_recur), 0)

        children = (self.sigarrs1a + self.epcs1a + self.evts1a +
                    self.irsigs1a + self.trains1a + self.img_seqs1a)
        assert_same_sub_schema(list(self.seg1._single_children), children)
        assert_same_sub_schema(list(self.seg1.data_children), children)
        assert_same_sub_schema(list(self.seg1.data_children_recur), children)
        assert_same_sub_schema(list(self.seg1.children), children)
        assert_same_sub_schema(list(self.seg1.children_recur), children)

        self.assertEqual(len(self.seg1.parents), 1)
        self.assertEqual(self.seg1.parents[0].name, 'block1')
Example no. 22
def proc_dam(filename):
    '''Load a dam file that has already been processed by the official matlab
    file converter.  That matlab data is saved to an m-file, which is then
    converted to a numpy '.npz' file.  This numpy file is the file actually
    loaded.  This function converts it to a neo block and returns the block.
    This block can be compared to the block produced by BrainwareDamIO to
    make sure BrainwareDamIO is working properly.

    block = proc_dam(filename)

    filename: The file name of the numpy file to load.  It should end with
    '*_dam_py?.npz'. This will be converted to a neo 'file_origin' property
    with the value '*.dam', so the filename to compare should fit that pattern.
    'py?' should be 'py2' for the python 2 version of the numpy file or 'py3'
    for the python 3 version of the numpy file.

    example: filename = 'file1_dam_py2.npz'
             dam file name = 'file1.dam'
    '''
    with np.load(filename) as damobj:
        damfile = list(damobj.items())[0][1].flatten()

    filename = os.path.basename(filename[:-12] + '.dam')

    signals = [res.flatten() for res in damfile['signal']]
    stimIndexes = [int(res[0, 0].tolist()) for res in damfile['stimIndex']]
    timestamps = [res[0, 0] for res in damfile['timestamp']]

    block = Block(file_origin=filename)

    chx = ChannelIndex(file_origin=filename,
                       index=np.array([0]),
                       channel_ids=np.array([1]),
                       channel_names=np.array(['Chan1'], dtype='S'))

    block.channel_indexes.append(chx)

    params = [res['params'][0, 0].flatten() for res in damfile['stim']]
    values = [res['values'][0, 0].flatten() for res in damfile['stim']]
    params = [[res1[0] for res1 in res] for res in params]
    values = [[res1 for res1 in res] for res in values]
    stims = [dict(zip(param, value)) for param, value in zip(params, values)]

    fulldam = zip(stimIndexes, timestamps, signals, stims)
    for stimIndex, timestamp, signal, stim in fulldam:
        sig = AnalogSignal(signal=signal * pq.mV,
                           t_start=timestamp * pq.d,
                           file_origin=filename,
                           sampling_period=1. * pq.s)
        segment = Segment(file_origin=filename,
                          index=stimIndex,
                          **stim)
        segment.analogsignals = [sig]
        block.segments.append(segment)

    block.create_many_to_one_relationship()

    return block
Example no. 23
    def read_block(self, lazy=False, cascade=True, **kargs):
        '''
        Reads a block from the simple spike data file "fname" generated
        with BrainWare
        '''

        # No keyword arguments are implemented so far.  If someone passes
        # them, they either expect them to do something or are making a
        # mistake, neither of which should pass silently.
        if kargs:
            raise NotImplementedError('This method does not support any '
                                      'keyword arguments yet')
        self._fsrc = None
        self.__lazy = lazy

        self._blk = Block(file_origin=self._filename)
        block = self._blk

        # if we aren't doing cascade, don't load anything
        if not cascade:
            return block

        # create the objects to store other objects
        chx = ChannelIndex(file_origin=self._filename,
                           index=np.array([], dtype=int))
        self.__unit = Unit(file_origin=self._filename)

        # load objects into their containers
        block.channel_indexes.append(chx)
        chx.units.append(self.__unit)

        # initialize values
        self.__t_stop = None
        self.__params = None
        self.__seg = None
        self.__spiketimes = None

        # open the file
        with open(self._path, 'rb') as self._fsrc:
            res = True
            # while the file is not done keep reading segments
            while res:
                res = self.__read_id()

        block.create_many_to_one_relationship()

        # cleanup attributes
        self._fsrc = None
        self.__lazy = False

        self._blk = None

        self.__t_stop = None
        self.__params = None
        self.__seg = None
        self.__spiketimes = None

        return block
Example no. 24
    def read_block(self, lazy=False):

        assert os.path.exists(self.filename), "Cannot locate file: {}".format(
            self.filename)

        with NWBHDF5IO(self.filename, mode='r') as io:
            nwb = io.read()

        blk = Block()
Example no. 25
    def test_signals_compound_units(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        units = pq.CompoundUnit("1/30000*V")
        srate = pq.Quantity(10, pq.CompoundUnit("1.0/10 * Hz"))
        asig = AnalogSignal(signal=self.rquant((10, 3), units),
                            sampling_rate=srate)
        seg.analogsignals.append(asig)

        self.write_and_compare([block])

        anotherblock = Block("ir signal block")
        seg = Segment("ir signal seg")
        anotherblock.segments.append(seg)
        irsig = IrregularlySampledSignal(signal=np.random.random((20, 3)),
                                         times=self.rquant(
                                             20, pq.CompoundUnit("0.1 * ms"),
                                             True),
                                         units=pq.CompoundUnit("10 * V / s"))
        seg.irregularlysampledsignals.append(irsig)
        self.write_and_compare([block, anotherblock])

        block.segments[0].analogsignals.append(
            AnalogSignal(signal=[10.0, 1.0, 3.0],
                         units=pq.S,
                         sampling_period=pq.Quantity(3, "s"),
                         dtype=np.double,
                         name="signal42",
                         description="this is an analogsignal",
                         t_start=45 * pq.CompoundUnit("3.14 * s")), )
        self.write_and_compare([block, anotherblock])

        times = self.rquant(10, pq.CompoundUnit("3 * year"), True)
        block.segments[0].irregularlysampledsignals.append(
            IrregularlySampledSignal(times=times,
                                     signal=np.random.random((10, 3)),
                                     units="mV",
                                     dtype=float,
                                     name="some sort of signal",
                                     description="the signal is described"))

        self.write_and_compare([block, anotherblock])
Example no. 26
def nuevobloque(bloqueviejo, umbral=100, desde=50, hasta=500, cual=0):
    '''Function that, starting from a neo Block, creates a new one,
    trimmed and aligned to the detected stimulus.'''
    result = Block()
    for j in bloqueviejo.segments:
        # take the first (active) channel of each analogsignal and trim it
        estimindex = detectaestimulo(j.analogsignals[0], umbral, cual)
        cacho = recortasegnal(j.analogsignals[0], estimindex, desde, hasta)
        result.segments.append(cacho)
    return result
Example no. 27
    def test_channel_index_write(self):
        block = Block(name=self.rword())
        chx = ChannelIndex(name=self.rword(),
                           description=self.rsentence(),
                           index=[1, 2, 3, 5, 8, 13])
        block.channel_indexes.append(chx)
        self.write_and_compare([block])

        chx.annotate(**self.rdict(3))
        self.write_and_compare([block])
Example no. 28
def import_dapsys_csv_files(directory: str,
                            sampling_rate: Union[Quantity, str] = "imply",
                            ap_correlation_window_size: Quantity = Quantity(0.003, "s")) \
                            -> Tuple[Block, Dict[TypeID, Dict[str, str]], List[APTrack]]:

    csv_files = _get_files_with_extension(directory, ".csv")

    main_pulses: Event = _read_main_pulse_file(filepaths=csv_files)
    irregular_sig: IrregularlySampledSignal = _read_signal_file(
        filepaths=csv_files, signal_unit="uV")

    if isinstance(sampling_rate, str) and sampling_rate == "imply":
        sampling_rate = _imply_sampling_rate_from_irregular_signal(
            irregular_sig)

    analog_sig: AnalogSignal = convert_irregularly_sampled_signal_to_analog_signal(
        irregular_sig, sampling_rate=sampling_rate)
    analog_sig.annotate(id=f"{TypeID.RAW_DATA.value}.1",
                        type_id=TypeID.RAW_DATA.value)

    ap_tracks: List[APTrack] = _read_track_files(filepaths=csv_files,
                                                 el_stimuli=main_pulses,
                                                 sampling_rate=sampling_rate)
    track_aps: SpikeTrain = _find_action_potentials_on_tracks(
        ap_tracks=ap_tracks,
        el_stimuli=main_pulses,
        signal=irregular_sig,
        window_size=ap_correlation_window_size,
        sampling_rate=sampling_rate)

    # create mapping from names to channel ids
    channel_id_map = {type_id: {} for type_id in TypeID}
    channel_id_map[TypeID.ELECTRICAL_STIMULUS].update(
        {"Main Pulse": main_pulses.annotations["id"]})
    channel_id_map[TypeID.RAW_DATA].update({
        "Analog Signal":
        analog_sig.annotations["id"],
        "Irregular Signal":
        irregular_sig.annotations["id"]
    })
    channel_id_map[TypeID.ACTION_POTENTIAL].update(
        {"Track APs": track_aps.annotations["id"]})

    # produce the corresponding NEO objects
    block: Block = Block(name="Base block of dapsys csv recording")
    segment: Segment = Segment(name="This recording consists of one segment")

    segment.events.append(main_pulses)
    segment.irregularlysampledsignals.append(irregular_sig)
    segment.analogsignals.append(analog_sig)
    segment.spiketrains.append(track_aps)

    block.segments.append(segment)

    return block, channel_id_map, ap_tracks
Example no. 29
    def read_block(self, lazy=False, **kwargs):
        # sort file names in natural (human) order, e.g. img2 before img10
        def natural_sort(file_list):
            def convert(text):
                return int(text) if text.isdigit() else text.lower()

            def alphanum_key(key):
                return [convert(c) for c in re.split('([0-9]+)', key)]

            return sorted(file_list, key=alphanum_key)

        # find all the images in the given directory
        file_name_list = []
        # name of extensions to track
        types = ["*.tif", "*.tiff"]
        for file in types:
            file_name_list.append(glob.glob(self.filename + "/" + file))
        # flatten list
        file_name_list = [
            item for sublist in file_name_list for item in sublist
        ]
        # delete path in the name of file
        file_name_list = [
            file_name[len(self.filename) + 1::] for file_name in file_name_list
        ]
        # sorting file
        file_name_list = natural_sort(file_name_list)
        list_data_image = []
        for file_name in file_name_list:
            list_data_image.append(
                np.array(Image.open(self.filename + "/" + file_name),
                         dtype=float))
        list_data_image = np.array(list_data_image)
        # 4 dimensions means RGB(A) frames: reload as 8-bit grayscale ('L')
        if len(list_data_image.shape) == 4:
            list_data_image = []
            for file_name in file_name_list:
                list_data_image.append(
                    np.array(Image.open(self.filename + "/" +
                                        file_name).convert('L'),
                             dtype=float))

        print("read block")
        image_sequence = ImageSequence(np.stack(list_data_image),
                                       units=self.units,
                                       sampling_rate=self.sampling_rate,
                                       spatial_scale=self.spatial_scale)
        print("creating segment")
        segment = Segment(file_origin=self.filename)
        segment.annotate(tiff_file_names=file_name_list)
        segment.imagesequences = [image_sequence]

        block = Block(file_origin=self.filename)
        segment.block = block
        block.segments.append(segment)
        print("returning block")
        return block
Example no. 30
    def test_event_write(self):
        block = Block()
        seg = Segment()
        block.segments.append(seg)

        event = Event(times=np.arange(0, 30, 10) * pq.s,
                      labels=np.array(["0", "1", "2"]),
                      name="event name",
                      description="event description")
        seg.events.append(event)
        self.write_and_compare([block])