Example #1
    def load_blackrock_data_neo(base_fn):
        neo_io = BlackrockIO(filename=base_fn)
        neo_block = neo_io.read_block(lazy=False,
                                      cascade=True,
                                      n_starts=None,
                                      n_stops=None,
                                      channels='all',
                                      nsx_to_load=5,
                                      scaling='voltage',
                                      units='none',
                                      load_waveforms=False,
                                      load_events=True)

        return {
            'ana_times':
            neo_block.segments[0].analogsignals[0].times,
            'ana_data':
            np.asarray([x for x in neo_block.segments[0].analogsignals])[:, :,
                                                                         0],
            'samp_per_s':
            neo_block.segments[0].analogsignals[0].sampling_rate,
            'chan_labels': [
                x.name.decode('utf8')
                for x in neo_block.segments[0].analogsignals
            ],
            'ev_times':
            neo_block.segments[0].events[0].times,
            'ev_depths':
            np.asarray([
                float(x.split(':')[1])
                for x in neo_block.segments[0].events[0].labels
            ])
        }
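A minimal usage sketch for the loader above, assuming the snippet's imports (BlackrockIO from neo.io, numpy as np) and a hypothetical base filename 'myrecording' without extension:

    # Hypothetical usage of load_blackrock_data_neo(); 'myrecording' is a placeholder
    # for the recording's base filename (no .nsX/.nev extension).
    data = load_blackrock_data_neo('myrecording')
    print(data['samp_per_s'])                             # sampling rate of the analog signals
    print(data['ana_data'].shape)                         # (n_channels, n_samples) matrix
    print(data['ev_times'][:5], data['ev_depths'][:5])    # first event times and parsed depths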
Example #2
    def load(self, files, channel):
        """
        Loads the neo blocks from the given files and the given channel.
        
        If the blocks are cached, the cached ones will be loaded;
        otherwise, the normal loading routine of the IOs is used and
        the resulting blocks are cached afterwards.
        
        **Arguments**
        
            *files* (list of string):
                The files that should be loaded.
            *channel* (integer):
                The channel that should be loaded.
        
        """
        #information for the progress
        l = len(files)
#         if not self.rgios:
#             v = 50
#             step = int(50/l)
#         else:
#             v = 0
#             step = int(100/l)
        v = 0
        step = int(100/l)
        
        self.delete_blocks()
        blocks = []
        #loading the IOs
#         rgios = self.load_rgIOs(files)
        
        #loading the blocks
        for i, f in enumerate(files):
                name = join(self.cdir, split(f)[1] + "_" + str(channel) + ".pkl")
 
                if exists(name):
                    #load from cache
                    pIO = PickleIO(name)
                    block = pIO.read_block()
                else:
                    #loading
#                     rgIO = rgios[i]
                    rgIO = BlackrockIO(f)
                    block = rgIO.read_block(channel_list=[channel], units=range(1, 17), nsx=None, waveforms=True)
                    del rgIO
                    #caching
                    pIO = PickleIO(name)
                    pIO.write_block(block)
                 
                blocks.append(block)
                #emits a signal with the current progress
                #after loading a block
                self.progress.emit(v+step*(i+1))

        nums = [len(b.recordingchannelgroups[0].units) for b in blocks]
        self.blocks = blocks
        self.nums = nums
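A hedged sketch of a call site for the caching load() method above; `loader` is assumed to be an instance of the surrounding class (with a valid cache directory `cdir` and a Qt `progress` signal), and the file paths are placeholders:

    # Hypothetical call site for load(); 'loader' and the paths are assumptions.
    files = ['/data/session1.ns6', '/data/session2.ns6']
    loader.load(files, channel=1)      # reads channel 1, using the pickle cache when present
    print(loader.nums)                 # number of units found in each loaded block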
Example #3
    def test_segment_detection_reset(self):
        """
        This test makes sure segments are detected correctly when reset was used during recording.
        """

        # Path to nev that will fail
        filename_nev_fail = self.get_filename_path('segment/ResetFail/reset_fail')
        # Path to nsX and nev that will NOT fail
        filename = self.get_filename_path('segment/ResetCorrect/reset')

        # Warning filter needs to be set to always before first occurrence of this warning
        warnings.simplefilter("always", UserWarning)

        # This fails, because in the nev there is no way to separate two segments
        with self.assertRaises(AssertionError):
            reader = BlackrockIO(filename=filename, nsx_to_load=2, nev_override=filename_nev_fail)

        # The correct file will issue a warning because a reset has occurred
        # and could be detected, but was not explicitly documented in the file
        with warnings.catch_warnings(record=True) as w:
            reader = BlackrockIO(filename=filename, nsx_to_load=2)
            self.assertGreaterEqual(len(w), 1)
            messages = [str(warning.message) for warning in w if warning.category == UserWarning]
            self.assertIn("Detected 1 undocumented segments within nev data after "
                          "timestamps [5451].", messages)

        # Manually reset warning filter in order to not show too many warnings afterwards
        warnings.simplefilter("default")

        block = reader.read_block(load_waveforms=False, signal_group_mode="split-all")

        # 1 Segment at the beginning and 1 after reset
        self.assertEqual(len(block.segments), 2)
        # Checking all times are correct as read from file itself
        # (taking neo calculations into account)
        self.assertEqual(block.segments[0].t_start, 0.0)
        self.assertEqual(block.segments[0].t_stop, 4.02)
        # Clock is reset to 0
        self.assertEqual(block.segments[1].t_start, 0.0032)
        self.assertEqual(block.segments[1].t_stop, 3.9842)
        self.assertEqual(block.segments[0].analogsignals[0].t_start, 0.0)
        self.assertEqual(block.segments[0].analogsignals[0].t_stop, 4.02)
        self.assertEqual(block.segments[1].analogsignals[0].t_start, 0.0032)
        self.assertEqual(block.segments[1].analogsignals[0].t_stop, 3.9842)
        self.assertEqual(block.segments[0].spiketrains[0].t_start, 0.0)
        self.assertEqual(block.segments[0].spiketrains[0].t_stop, 4.02)
        self.assertEqual(block.segments[1].spiketrains[0].t_start, 0.0032)
        self.assertEqual(block.segments[1].spiketrains[0].t_stop, 3.9842)

        # Each segment must have the same number of analogsignals
        self.assertEqual(len(block.segments[0].analogsignals),
                         len(block.segments[1].analogsignals))

        # Length of analogsignals as created
        self.assertEqual(len(block.segments[0].analogsignals[0][:]), 4020)
        self.assertEqual(len(block.segments[1].analogsignals[0][:]), 3981)
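A short sketch of how the two segments produced around a clock reset could be inspected afterwards, assuming `reader` is the BlackrockIO instance created above:

    # Hypothetical follow-up: list the segments created around the reset.
    block = reader.read_block(load_waveforms=False)
    for i, seg in enumerate(block.segments):
        print(i, seg.t_start, seg.t_stop,
              len(seg.analogsignals), len(seg.spiketrains))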
Example #5
    def test_inputs_V21(self):
        """
        Test various inputs to BlackrockIO.read_block with a version 2.1 file
        to check for parsing errors.
        """
        filename = self.get_filename_path('blackrock_2_1/l101210-001')
        reader = BlackrockIO(filename=filename, verbose=False, nsx_to_load=5)

        # Assert IOError is raised when no Blackrock files are available
        with self.assertRaises(IOError):
            reader2 = BlackrockIO(filename='nonexistent')
        # with self.assertRaises(IOError):
        #     reader2 = BlackrockIO(filename=filename, nev_override='nonexistent')

        # Load data to the maximum extent
        block = reader.read_block(load_waveforms=False, signal_group_mode='split-all')
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        buggy_slice = (-100 * pq.ms, too_large_tstop)

        # This is valid in read_segment
        seg = reader.read_segment(seg_index=0, time_slice=buggy_slice, strict_slicing=False)
        # this raises an error
        with self.assertRaises(AssertionError):
            seg = reader.read_segment(seg_index=0, time_slice=buggy_slice, strict_slicing=True)

        lenb = len(seg.analogsignals[0])
        numspb = len(seg.spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # test 4 Units
        block = reader.read_block(load_waveforms=True,
                                signal_group_mode='split-all',
                              units_group_mode='all-in-one')

        self.assertEqual(len(block.segments[0].analogsignals), 96)
        self.assertEqual(len(block.channel_indexes[-1].units), 218)
        self.assertEqual(len(block.channel_indexes[-1].units),
                         len(block.segments[0].spiketrains))

        anasig = block.segments[0].analogsignals[0]
        self.assertIsNotNone(anasig.file_origin)
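A minimal sketch of the slicing behaviour exercised above, assuming a BlackrockIO `reader` and `import quantities as pq`; with strict_slicing=False an out-of-range window is clipped to the recording, with strict_slicing=True it is rejected:

    # Hypothetical sketch of clipped vs. strict time slicing.
    sl = (-100 * pq.ms, 10000 * pq.s)      # deliberately out-of-range window
    seg = reader.read_segment(seg_index=0, time_slice=sl, strict_slicing=False)  # clipped
    try:
        reader.read_segment(seg_index=0, time_slice=sl, strict_slicing=True)
    except AssertionError:
        print('strict slicing rejected the out-of-range window')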
Example #6
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """
        filename = self.get_filename_path('FileSpec2.3001')
        reader = BlackrockIO(
            filename=filename,
            verbose=False,
            nsx_to_load=5,
        )

        # Load data to the maximum extent
        block = reader.read_block(load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        buggy_slice = (-100 * pq.ms, too_large_tstop)

        # this is valid in read_segment because seg_index is specified
        seg = reader.read_segment(seg_index=0, time_slice=buggy_slice)

        lenb = len(seg.analogsignals[0])
        numspb = len(seg.spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # test 4 Units
        block = reader.read_block(load_waveforms=True,
                                  units_group_mode='all-in-one')

        self.assertEqual(len(block.segments[0].analogsignals), 10)
        self.assertEqual(len(block.channel_indexes[-1].units), 4)
        self.assertEqual(len(block.channel_indexes[-1].units),
                         len(block.segments[0].spiketrains))

        anasig = block.segments[0].analogsignals[0]
        self.assertIsNotNone(anasig.file_origin)
    def test_load_muliple_nsx(self):
        """
        Test if multiple nsx signals can be loaded at the same time.
        """
        filename = self.get_filename_path('blackrock_2_1/l101210-001')
        reader = BlackrockIO(filename=filename, verbose=False, nsx_to_load='all')

        # number of different sampling rates corresponds to number of nsx signals, because
        # single nsx contains only signals of identical sampling rate
        block = reader.read_block(load_waveforms=False)
        sampling_rates = np.unique(
            [a.sampling_rate.rescale('Hz') for a in block.filter(objects='AnalogSignal')])
        self.assertEqual(len(sampling_rates), len(reader._selected_nsx))

        segment = reader.read_segment()
        sampling_rates = np.unique(
            [a.sampling_rate.rescale('Hz') for a in segment.filter(objects='AnalogSignal')])
        self.assertEqual(len(sampling_rates), len(reader._selected_nsx))
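A hedged sketch of grouping the loaded signals by sampling rate, assuming a `block` read with nsx_to_load='all' as in the test above:

    # Hypothetical sketch: bucket AnalogSignals by sampling rate (one bucket per nsx).
    from collections import defaultdict
    by_rate = defaultdict(list)
    for sig in block.filter(objects='AnalogSignal'):
        by_rate[float(sig.sampling_rate.rescale('Hz'))].append(sig)
    for rate, sigs in sorted(by_rate.items()):
        print(rate, 'Hz ->', len(sigs), 'signals')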
Example #8
    def test_inputs_V21(self):
        """
        Test various inputs to BlackrockIO.read_block with a version 2.1 file
        to check for parsing errors.
        """
        filename = self.get_local_path('blackrock/blackrock_2_1/l101210-001')
        reader = BlackrockIO(filename=filename, verbose=False, nsx_to_load=5)

        # Assert IOError is raised when no Blackrock files are available
        with self.assertRaises(IOError):
            reader2 = BlackrockIO(filename='nonexistent')
        # with self.assertRaises(IOError):
        #     reader2 = BlackrockIO(filename=filename, nev_override='nonexistent')

        # Load data to the maximum extent
        block = reader.read_block(load_waveforms=False,
                                  signal_group_mode='split-all')
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        buggy_slice = (-100 * pq.ms, too_large_tstop)

        # This is valid in read_segment
        seg = reader.read_segment(seg_index=0,
                                  time_slice=buggy_slice,
                                  strict_slicing=False)
        # this raises an error
        with self.assertRaises(AssertionError):
            seg = reader.read_segment(seg_index=0,
                                      time_slice=buggy_slice,
                                      strict_slicing=True)

        lenb = len(seg.analogsignals[0])
        numspb = len(seg.spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # test 4 Units
        block = reader.read_block(
            load_waveforms=True,
            signal_group_mode='split-all',
        )  #units_group_mode='all-in-one')

        self.assertEqual(len(block.segments[0].analogsignals), 96)
        #self.assertEqual(len(block.channel_indexes[-1].units), 218)
        #self.assertEqual(len(block.channel_indexes[-1].units),
        #                 len(block.segments[0].spiketrains))
        anasig = block.segments[0].analogsignals[0]
        self.assertIsNotNone(anasig.file_origin)
Example #9
    def load_blackrock_data_neo(base_fn):
        neo_io = BlackrockIO(filename=base_fn, nsx_to_load=5)
        neo_block = neo_io.read_block(lazy=False, load_waveforms=False)
        comments = neo_io.nev_data['Comments']

        return {
            'ana_times':
            neo_block.segments[0].analogsignals[0].times,
            'ana_data':
            np.asarray([x for x in neo_block.segments[0].analogsignals])[:, :,
                                                                         0],
            'samp_per_s':
            neo_block.segments[0].analogsignals[0].sampling_rate,
            'chan_labels':
            [x.name for x in neo_block.segments[0].analogsignals],
            'ev_times':
            pq.s * comments['timestamp'] / 30000,
            'ev_depths':
            np.asarray([
                float(x.decode('utf8').split(':')[1])
                for x in comments['comment']
            ])
        }
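The comment parsing above assumes each NEV comment has the form 'label:depth'; a standalone, hedged sketch of that conversion with made-up data:

    # Hypothetical sketch of the comment parsing used above (data is invented).
    import numpy as np
    import quantities as pq
    comments = {'timestamp': np.array([30000, 60000]),
                'comment': [b'depth:1.5', b'depth:2.25']}
    ev_times = pq.s * comments['timestamp'] / 30000        # 30 kHz ticks -> seconds
    ev_depths = np.asarray([float(c.decode('utf8').split(':')[1])
                            for c in comments['comment']])
    print(ev_times, ev_depths)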
Example #10
def new_brio_load():
    # Alternative constructor arguments kept from earlier experiments:
    #   nev_override='-', channels_to_load={1, 3, 5, 7, 95},
    #   nev_override='-'.join([dirname, '03'])
    newbrio_reader = BlackrockIO(dirname, nsx_to_load=2)
    newbrio_reader2 = BlackrockIO(dirname2, nsx_to_load=2)
    # new_block = newbrio_reader.read_block()
    # print(newbrio_reader)
    # newbrio_reader.parse_header()
    # Alternative read_block arguments kept from earlier experiments:
    #   nsx_to_load='all', signal_group_mode="group-by-same-units", load_waveforms=True,
    #   time_slices=[(float('-inf'), float('inf')), (1 * pq.s, 2 * pq.s), (3 * pq.s, 4 * pq.s)],
    #   time_slices=[(1.0, 40.0)], time_slices=[(0.001366667 * pq.s, 2.0 * pq.s)]
    new_block = newbrio_reader.read_block(load_waveforms=False, signal_group_mode="split-all")
    new_block2 = newbrio_reader2.read_block(load_waveforms=False, signal_group_mode="split-all")
    # output(new_block)
    print('Loading new IO done')
    return new_block, new_block2
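The script above relies on module-level `dirname` and `dirname2`; a hedged sketch of how they might be defined before calling it (the paths are placeholders only):

    # Hypothetical setup for new_brio_load(); paths are placeholders.
    dirname = '/data/blackrock/session-001'     # base name of the first recording
    dirname2 = '/data/blackrock/session-002'    # base name of the second recording
    block1, block2 = new_brio_load()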
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file to check for parsing errors.
        """

        # Set to False if any check fails
        allok = True

        try:
            b = BlackrockIO(os.path.join(tempfile.gettempdir(), 'files_for_testing_neo', 'blackrock', 'FileSpec2.3001'), print_diagnostic=False)

            # Load data to maximum extent, one None is not given as list
            block = b.read_block(n_starts=[None], n_stops=None, channel_list=range(1, 9), nsx=5, units=[], events=True, waveforms=False)
            lena = len(block.segments[0].analogsignals[0])
            numspa = len(block.segments[0].spiketrains[0])

            # Load data with very long extent using a negative time and the get_max_time() method
            block = b.read_block(n_starts=[-100 * pq.ms], n_stops=[b.get_max_time()], channel_list=range(1, 9), nsx=[5], units=[], events=False, waveforms=False)
            lenb = len(block.segments[0].analogsignals[0])
            numspb = len(block.segments[0].spiketrains[0])

            # Same length of analog signal? Both should have read the complete data set!
            if lena != lenb :
                allok = False
            # Same length of spike train? Both should have read the complete data set!
            if numspa != numspb:
                allok = False

            # Load data with very long extent, n_starts and n_stops not given as list
            block = b.read_block(n_starts=100 * b.nsx_unit[5], n_stops=200 * b.nsx_unit[5], channel_list=range(1, 9), nsx=5, units=[], events=False, waveforms=False)
            lena = len(block.segments[0].analogsignals[0])

            block = b.read_block(n_starts=301 * b.nsx_unit[5], n_stops=401 * b.nsx_unit[5], channel_list=range(1, 9), nsx=5, units=[], events=False, waveforms=False)
            lenb = len(block.segments[0].analogsignals[0])

            # Same length?
            if lena != lenb:
                allok = False

            # Length should be 100 samples exactly
            if lena != 100:
                allok = False

            # Load partial data types and check if this selection is made
            block = b.read_block(n_starts=None, n_stops=None, channel_list=range(1, 9), nsx=5, units=None, events=False, waveforms=True)
            if len(block.segments) != 1:
                allok = False
            if len(block.segments[0].analogsignals) != 8:
                allok = False
            if len(block.recordingchannelgroups) != 8:
                allok = False
            if len(block.recordingchannelgroups[0].units) != 0:
                allok = False
            if len(block.segments[0].eventarrays) != 0:
                allok = False
            if len(block.segments[0].spiketrains) != 0:
                allok = False

            block = b.read_block(n_starts=[None, 3000 * pq.ms], n_stops=[1000 * pq.ms, None], channel_list=range(1, 9), nsx=None, units={1:0, 5:0, 6:0}, events=True, waveforms=True)
            if len(block.segments) != 2:
                allok = False
            if len(block.segments[0].analogsignals) != 0:
                allok = False
            if len(block.recordingchannelgroups) != 8:
                allok = False
            if len(block.recordingchannelgroups[0].units) != 1:
                allok = False
            # if len(block.recordingchannelgroups[4].units) != 0:  # only one of two neurons on channel 78, and only one unit for two segments!
            #    allok = False
            if len(block.segments[0].eventarrays) == 0:
                allok = False
            if len(block.segments[0].spiketrains[0].waveforms) == 0:
                allok = False

        except Exception:
            allok = False

        self.assertTrue(allok)
Example #12
    def read_block(
            self, index=None, name=None, description=None, nsx_to_load='none',
            n_starts=None, n_stops=None, channels=range(1, 97), units='none',
            load_waveforms=False, load_events=False, scaling='raw',
            lazy=False, cascade=True, corrections=False):
        """
        Args:
            index (None, int):
                If not None, index of block is set to user input.
            name (None, str):
                If None, name is set to default, otherwise it is set to user
                input.
            description (None, str):
                If None, description is set to default, otherwise it is set to
                user input.
            nsx_to_load (int, list, str):
                ID(s) of nsx file(s) from which to load data, e.g., if set to
                5 only data from the ns5 file are loaded. If 'none' or empty
                list, no nsx files and therefore no analog signals are loaded.
                If 'all', data from all available nsx are loaded.
            n_starts (None, Quantity, list):
                Start times for data in each segment. Number of entries must be
                equal to length of n_stops. If None, intrinsic recording start
                times of files set are used.
            n_stops (None, Quantity, list):
                Stop times for data in each segment. Number of entries must be
                equal to length of n_starts. If None, intrinsic recording stop
                times of files set are used.
            channels (int, list, str):
                Channel id(s) from which to load data. If 'none' or empty list,
                no channels and therefore no analog signal or spiketrains are
                loaded. If 'all', all available channels are loaded.
            units (int, list, str, dict):
                ID(s) of unit(s) to load. If 'none' or empty list, no units and
                therefore no spiketrains are loaded. If 'all', all available
                units are loaded. If dict, the above can be specified
                individually for each channel (keys), e.g. {1: 5, 2: 'all'}
                loads unit 5 from channel 1 and all units from channel 2.
            load_waveforms (boolean):
                If True, waveforms are attached to all loaded spiketrains.
            load_events (boolean):
                If True, all recorded events are loaded.
            scaling (str):
                Determines whether time series of individual
                electrodes/channels are returned as AnalogSignals containing
                raw integer samples ('raw'), or scaled to arrays of floats
                representing voltage ('voltage'). Note that for file
                specification 2.1 and lower, the option 'voltage' requires a
                nev file to be present.
            lazy (bool):
                If True, only the shape of the data is loaded.
            cascade (bool or "lazy"):
                If True, only the block without children is returned.
            corrections (bool):
                If True, gap correction data are loaded from
                'corrections.txt' and spike times are shifted if for spikes
                after gap occurrence. Default: False

        Returns (neo.Block):
            Annotations:
                avail_file_set (list):
                    List of extensions of all available files for the given
                    recording.
                avail_nsx (list):
                    List of available nsx ids (int).
                avail_nev (boolean):
                    True if nev is available.
                avail_sif (boolean):
                    True if sif is available.
                avail_ccf (boolean):
                    True if ccf is available.
                rec_pauses (boolean):
                    True if at least one recording pause occurred.
                nb_segments (int):
                    Number of created segments after merging recording times
                    specified by user with the intrinsic ones of the file set.
        """

        if corrections:
            ### next 35 are copied from rgio.py
            #reading correction parameters from 'corrections.txt' file and saving them
            #gap_corrections = [gap_start_bin,gap_size_bins]
            #TODO: Make this more general, use time resolution from BRIO
            timestamp_res = 30000
            gap_corrections = [None,None]
            if corrections:
                try:
                    correction_file = open(os.path.dirname(__file__) + '/corrections.txt', 'r')
                    for line in correction_file:
                        if os.path.basename(self.filename) in line:
                            numbers = [int(s) for s in line.split() if s.isdigit()]
                            if len(numbers)==2:
                                gap_corrections =(
                                    numbers *
                                    np.array(1.0)*pq.CompoundUnit(
                                            '1.0/%i*s'%(timestamp_res)))
                            else:
                                warnings.warn('Wrong number of integers in corrections.txt for session %s'%os.path.basename(self.filename))
                            break
                    correction_file.close()
                except IOError:
                    warnings.warn('No file "corrections.txt" found.')

                #correcting n_starts and n_stops for gap
                # listify if necessary
                n_starts_c = copy.deepcopy(n_starts) if type(n_starts) == list \
                    else [n_starts]
                n_stops_c = copy.deepcopy(n_stops) if type(n_stops) == list \
                    else [n_stops]

                # shift start and stop times to allow gap correction if gap is known
                if gap_corrections[0] is not None:
                    # for time_list in [n_starts_c,n_stops_c]:
                    #     #iterate over all n_start and n_stops
                    #     for i in range(len(time_list)):
                    #         if time_list[i]>=gap_corrections[0]:
                    #             time_list[i] += gap_corrections[1]

                    # iterate over all n_starts
                    for i in range(len(n_starts_c)):
                        if n_starts_c[i]>=gap_corrections[0] \
                                + gap_corrections[1]:
                            n_starts_c[i] += gap_corrections[1]
                    # iterate over all n_stops
                    for i in range(len(n_stops_c)):
                        if n_stops_c[i]>=gap_corrections[0]:
                            n_stops_c[i] += gap_corrections[1]


            # Load neo block
            block = BlackrockIO.read_block(
                self, index=index, name=name, description=description,
                nsx_to_load=nsx_to_load, n_starts=n_starts_c, n_stops=n_stops_c,
                channels=channels, units=units, load_waveforms=load_waveforms,
                load_events=load_events, scaling=scaling, lazy=lazy,
                cascade=cascade)

            # Apply alignment corrections
            #post correct gaps if gap is known
            if corrections and gap_corrections[0] is not None:
                # correct alignment
                for i in range(len(block.segments)):

                    # adjust spiketrains
                    for j in range(len(block.segments[i].spiketrains)):
                        st = block.segments[i].spiketrains[j]

                        #adjusting t_start
                        if st.t_start >= gap_corrections[0] + gap_corrections[1]:
                            st.t_start -= gap_corrections[1]

                        # correct for gap
                        st = st-((st>gap_corrections[0])*gap_corrections[1])

                        # discard spikes before t_start
                        if n_starts[i]:
                            idx_valid = np.nonzero(st >= n_starts[i])[0]
                            if len(idx_valid):
                                st = st[idx_valid[0]:]

                        # discard spikes after t_stop
                        if n_stops[i]:
                            idx_invalid = np.nonzero(st >= n_stops[i])[0]
                            if len(idx_invalid):
                                st = st[:idx_invalid[0]]

                        # shallow copy from original spiketrain (annotations, waveforms, etc.)
                        st.__dict__ = block.segments[i].spiketrains[j].__dict__.copy()

                        #adjusting t_stop
                        if st.t_stop >= gap_corrections[0] + gap_corrections[1]:
                            st.t_stop -= gap_corrections[1]

                        # link block to new spiketrain
                        block.segments[i].spiketrains[j] = st

                    # adjust analogsignals
                    for j in range(len(block.segments[i].analogsignals)):
                        # discard data after t_stop
                        if n_stops[i]:
                            idx_invalid = np.nonzero(block.segments[i].analogsignals[j].times >= n_stops[i])[0]
                            if len(idx_invalid):
                                block.segments[i].analogsignals[j] = block.segments[i].analogsignals[j][:idx_invalid[0]]

        else:
            # Load neo block
            block = BlackrockIO.read_block(
                self, index=index, name=name, description=description,
                nsx_to_load=nsx_to_load, n_starts=n_starts, n_stops=n_stops,
                channels=channels, units=units, load_waveforms=load_waveforms,
                load_events=load_events, scaling=scaling, lazy=lazy,
                cascade=cascade)

        # Annotate corrections to block
        block.annotate(corrected=corrections)

        monkey_prefix = os.path.basename(self.filename)[0]
        # Annotate Block with electrode id list for connector alignment
        if monkey_prefix in 'si':
            block.annotate(elid_list_ca=[-1, 81, 83, 85, 88, 90, 92, 93, 96, -1,
                                         79, 80, 84, 86, 87, 89, 91, 94, 63, 95,
                                         77, 78, 82, 49, 53, 55, 57, 59, 61, 32,
                                         75, 76, 45, 47, 51, 56, 58, 60, 64, 30,
                                         73, 74, 41, 43, 44, 46, 52, 62, 31, 28,
                                         71, 72, 39, 40, 42, 50, 54, 21, 29, 26,
                                         69, 70, 37, 38, 48, 15, 19, 25, 27, 24,
                                         67, 68, 35, 36, 5, 17, 13, 23, 20, 22,
                                         65, 66, 33, 34, 7, 9, 11, 12, 16, 18,
                                         -1, 2, 1, 3, 4, 6, 8, 10, 14, -1])
        else:
            self._print_verbose('No connector aligned electrode IDs available '
                                'for monkey %s'%monkey_prefix)

        # Add annotations to analogsignals and spiketrains in block
        if 'elid_list_ca' in block.annotations:
            for seg in block.segments:
                # Add annotations to analog signals and spiketrains
                for sig in seg.analogsignals:
                    if sig.annotations['channel_id'] <= 100:
                        el_id = sig.annotations['channel_id']
                        sig.annotations['el_id'] = el_id
                        ca_id = block.annotations['elid_list_ca'].index(el_id) + 1
                        sig.annotations['ca_id'] = ca_id
                for st in seg.spiketrains:
                    if st.annotations['channel_id'] <= 100:
                        el_id = st.annotations['channel_id']
                        st.annotations['el_id'] = el_id
                        ca_id = block.annotations['elid_list_ca'].index(el_id) + 1
                        st.annotations['ca_id'] = ca_id
                        if st.annotations['unit_id'] in self.get_sua_ids(el_id):
                            st.annotations['sua'] = True
                        else:
                            st.annotations['sua'] = False
            for unit in block.list_units:
                if unit.annotations['channel_id'] <= 100:
                    el_id = unit.annotations['channel_id']
                    unit.annotations['el_id'] = el_id
                    ca_id = block.annotations['elid_list_ca'].index(el_id) + 1
                    unit.annotations['ca_id'] = ca_id
                    if unit.annotations['unit_id'] in self.get_sua_ids(el_id):
                        unit.annotations['sua'] = True
                    else:
                        unit.annotations['sua'] = False
        return block
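A hedged usage sketch for the gap-corrected read_block above, assuming `io` is an instance of the subclass that defines it:

    # Hypothetical call of the corrected read_block() defined above.
    block = io.read_block(nsx_to_load=2, channels='all', units='all',
                          load_events=True, corrections=True)
    print(block.annotations['corrected'])       # True when corrections were requested
    print(block.annotations.get('elid_list_ca', 'no connector-aligned IDs'))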
Example #13
    def test_compare_blackrockio_with_matlabloader_v21(self):
        """
        This test compares the output of BlackrockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events.
        """

        dirname = self.get_local_path('blackrock/blackrock_2_1/l101210-001')
        # First run with parameters for ns5, then run with correct parameters for ns2
        parameters = [('blackrock/blackrock_2_1/l101210-001_nev-02_ns5.mat', {
            'nsx_to_load': 5,
            'nev_override': '-'.join([dirname, '02'])
        }), ('blackrock/blackrock_2_1/l101210-001.mat', {
            'nsx_to_load': 2
        })]
        for index, param in enumerate(parameters):
            # Load data from matlab generated files
            ml = scipy.io.loadmat(self.get_local_path(param[0]))
            lfp_ml = ml['lfp']  # (channel x time) LFP matrix
            ts_ml = ml['ts']  # spike time stamps
            elec_ml = ml['el']  # spike electrodes
            unit_ml = ml['un']  # spike unit IDs
            wf_ml = ml['wf']  # waveforms
            mts_ml = ml['mts']  # marker time stamps
            mid_ml = ml['mid']  # marker IDs

            # Load data from original data files using the Neo BlackrockIO
            session = BlackrockIO(dirname, verbose=False, **param[1])
            block = session.read_block(load_waveforms=True,
                                       signal_group_mode='split-all')
            # Check if analog data are equal
            self.assertGreater(len(block.groups), 0)

            # Check if spikes are equal
            self.assertEqual(len(block.segments), 1)
            for st_i in block.segments[0].spiketrains:
                channelid = st_i.annotations['channel_id']
                unitid = st_i.annotations['unit_id']

                # Compare waveforms
                matlab_wf = wf_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml ==
                                   unitid)), :][0]
                # Atleast_2d as correction for waveforms that are saved
                # in single dimension in SpikeTrain
                # because only one waveform is available
                assert_equal(
                    np.atleast_2d(np.squeeze(st_i.waveforms).magnitude),
                    matlab_wf)

                # Compare spike timestamps
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                # Make sure the unit is really seconds and not 1/30000 seconds
                if (not st_i.units == pq.CompoundUnit("1.0/{} * s".format(30000))) and \
                        st_i.units == pq.s:
                    st_i = np.round(st_i.base * 30000).astype(int)
                assert_equal(st_i, matlab_spikes)

            # Check if digital input port events are equal
            self.assertGreater(len(block.segments[0].events), 0)
            for ea_i in block.segments[0].events:
                if ea_i.name == 'digital_input_port':
                    # Get all digital event IDs in this recording
                    marker_ids = set(ea_i.labels)
                    for marker_id in marker_ids:
                        python_digievents = np.round(
                            ea_i.times.base[ea_i.labels == marker_id] *
                            30000).astype(int)
                        matlab_digievents = mts_ml[np.nonzero(
                            mid_ml == int(marker_id))]
                        assert_equal(python_digievents, matlab_digievents)
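The tick conversion used for the spike-time comparison above can be sketched standalone, assuming numpy and quantities; the values are invented:

    # Hypothetical sketch: convert spike times in seconds to 30 kHz ticks.
    import numpy as np
    import quantities as pq
    times_s = np.array([0.1, 0.2]) * pq.s
    ticks = np.round(times_s.magnitude * 30000).astype(int)
    print(ticks)    # [3000 6000]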
Example #14
    def test_load_waveforms(self):
        filename = self.get_filename_path('FileSpec2.3001')
        reader = BlackrockIO(filename=filename, verbose=False)

        bl = reader.read_block(load_waveforms=True)
        assert_neo_object_is_compliant(bl)
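With load_waveforms=True each SpikeTrain carries a waveforms array; a minimal hedged sketch of inspecting it on the block read above:

    # Hypothetical follow-up: inspect the waveforms attached by load_waveforms=True.
    st = bl.segments[0].spiketrains[0]
    if st.waveforms is not None:
        print(st.waveforms.shape)    # (n_spikes, n_channels, n_samples)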
Example #15
    def __init__(
            self, filename, odmldir=None, nsx_override=None, nev_override=None,
            sif_override=None, ccf_override=None, odml_filename=None,
            verbose=False):
        """
        Constructor
        """

        # Remember choice whether to print diagnostic messages or not
        self._verbose = verbose
        self.__verbose_messages = []

        if odmldir is None:
            odmldir = ''

        for ext in self.extensions:
            filename = re.sub(os.path.extsep + ext + '$', '', filename)


        sorting_version = None
        txtpostfix = None
        if nev_override:
            sorting_version = nev_override
        else:
            nev_versions = [re.sub(
                os.path.extsep + 'nev$', '', p) for p in glob.glob(
                    filename + '*.nev')]
            nev_versions = [p.replace(filename, '') for p in nev_versions]
            if nev_versions:
                sorting_version = sorted(nev_versions)[-1]

        if sorting_version:
            if os.path.isfile(r'' + filename + sorting_version + "-test.txt"):
                txtpostfix = sorting_version + '-test'
            elif os.path.isfile(r'' + filename + sorting_version + ".txt"):
                txtpostfix = sorting_version

        # Initialize file
        BlackrockIO.__init__(
            self, filename, nsx_override=nsx_override,
            nev_override=filename + sorting_version, sif_override=sif_override,
            ccf_override=ccf_override, verbose=verbose)

        # printing can be only done after initialization of BlackrockIO
        if sorting_version:
            # Output which file is used in the end
            self._print_verbose("Using nev file: " + filename + sorting_version
                                + ".nev")

        if txtpostfix:
            # Output which file is used in the end
            self._print_verbose("Using txt sorting file: " + filename +
                                   txtpostfix + ".txt")

        # remove extensions from overrides
        filen = os.path.split(self.filename)[-1]
        if odml_filename:
            self._filenames['odml'] = ''.join(
                [odmldir, os.path.sep, odml_filename])
        else:
            self._filenames['odml'] = ''.join([odmldir, os.path.sep, filen])

        file2check = ''.join([self._filenames['odml'], os.path.extsep, 'odml'])
        if os.path.exists(file2check):
            self._avail_files['odml'] = True
            self.odmldoc = odml.tools.xmlparser.load(file2check)
        else:
            self._avail_files['odml'] = False
            self.odmldoc = None




        # Determine path to sorting file (txt) if it exists
        if txtpostfix is None:
            self.txt_fileprefix = None

        else:
            self.txt_fileprefix = filename + txtpostfix


        # TODO: Put this in again!
        # Interpret file
        self.__load_suamua()
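The constructor above picks the alphabetically last '*.nev' sorting variant; a hedged standalone sketch of that lookup, assuming a base filename without extension (the example path is a placeholder):

    # Hypothetical sketch of the nev sorting-version lookup used in the constructor.
    import glob
    import os
    import re

    def latest_nev_suffix(base_fn):
        versions = [re.sub(os.path.extsep + 'nev$', '', p).replace(base_fn, '')
                    for p in glob.glob(base_fn + '*.nev')]
        return sorted(versions)[-1] if versions else None

    print(latest_nev_suffix('/data/l101210-001'))   # e.g. '-03', or None if no nev exists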
Example #16
    def test_load_muliple_nsx(self):
        """
        Test if multiple nsx signals can be loaded at the same time.
        """
        filename = self.get_filename_path('blackrock_2_1/l101210-001')
        reader = BlackrockIO(filename=filename,
                             verbose=False,
                             nsx_to_load='all')

        # number of different sampling rates corresponds to number of nsx signals, because
        # single nsx contains only signals of identical sampling rate
        block = reader.read_block(load_waveforms=False)
        sampling_rates = np.unique([
            a.sampling_rate.rescale('Hz')
            for a in block.filter(objects='AnalogSignal')
        ])
        self.assertEqual(len(sampling_rates), 2)

        segment = reader.read_segment()
        sampling_rates = np.unique([
            a.sampling_rate.rescale('Hz')
            for a in segment.filter(objects='AnalogSignal')
        ])
        self.assertEqual(len(sampling_rates), 2)

        # load only ns5
        reader = BlackrockIO(filename=filename, nsx_to_load=5)
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (109224, 96))

        # load only ns2
        reader = BlackrockIO(filename=filename, nsx_to_load=2)
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (3640, 6))

        # load only ns2
        reader = BlackrockIO(filename=filename, nsx_to_load=[2])
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)

        # load ns2 + ns5
        reader = BlackrockIO(filename=filename, nsx_to_load=[2, 5])
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 2)
        self.assertEqual(seg.analogsignals[0].shape, (3640, 6))
        self.assertEqual(seg.analogsignals[1].shape, (109224, 96))

        # load only ns5
        reader = BlackrockIO(filename=filename, nsx_to_load='max')
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (109224, 96))
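A short hedged sketch summarising the nsx_to_load variants exercised above (int, list, 'all', 'max'), reusing the same `filename`:

    # Hypothetical sketch: the nsx selection variants covered by the test above.
    for choice in (2, [2], [2, 5], 'all', 'max'):
        seg = BlackrockIO(filename=filename, nsx_to_load=choice).read_segment()
        print(choice, '->', [sig.shape for sig in seg.analogsignals])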
Example #17
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """
        filename = self.get_filename_path('FileSpec2.3001')
        reader = BlackrockIO(filename=filename, verbose=False, nsx_to_load=5,)
        
        
        # Load data to maximum extent, one None is not given as list
        block = reader.read_block(time_slices=None,  load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording raise an error
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        buggy_slice = (-100 * pq.ms, too_large_tstop)

        # this raises an error in read_block
        with self.assertRaises(ValueError):
            block = reader.read_block(time_slices=[buggy_slice])
        
        #but this is valid in read_segment because seg_index is specified
        seg = reader.read_segment(seg_index=0, time_slice=buggy_slice)
        
        lenb = len(seg.analogsignals[0])
        numspb = len(seg.spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # Verify identical length of returned signals given equal
        # time_slice durations as input
        ns5_unit = block.segments[0].analogsignals[0].sampling_period
        time_slice = (100 * ns5_unit, 200 * ns5_unit)
        block = reader.read_block(time_slices=[time_slice])
        lena = len(block.segments[0].analogsignals[0])
        
        time_slice = (100 * ns5_unit, 200 * ns5_unit)
        block = reader.read_block(time_slices=[time_slice])
        lenb = len(block.segments[0].analogsignals[0])

        # Same length?
        self.assertEqual(lena, lenb)
        # Length should be 100 samples exactly
        self.assertEqual(lena, 100)

        # test 4 Units
        time_slices = [(0, 1000 * pq.ms), (3000 * pq.ms, 4000 * pq.ms)]
        block = reader.read_block(time_slices=time_slices, load_waveforms=True,
                    units_group_mode='all-in-one')

        self.assertEqual(len(block.segments), 2)
        self.assertEqual(len(block.segments[0].analogsignals), 10)
        self.assertEqual(len(block.channel_indexes[-1].units), 4)
        self.assertEqual(len(block.channel_indexes[-1].units), 
                    len(block.segments[0].spiketrains))
        
        anasig = block.segments[0].analogsignals[0]
        self.assertIsNotNone(anasig.file_origin)
    def test_compare_blackrockio_with_matlabloader(self):
        """
        This test compares the output of BlackRockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events on channels
        80-83 and spike waveforms on channel 82, unit 1.
        For details on the file contents, refer to FileSpec2.3.txt
        """

        # Load data from Matlab generated files
        ml = scipy.io.loadmat(
            get_test_file_full_path(ioclass=BlackrockIO,
                                    filename='FileSpec2.3001.mat',
                                    directory=self.local_test_dir,
                                    clean=False))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-3 from original data files using the Neo
        # BlackrockIO
        session = BlackrockIO(get_test_file_full_path(
            ioclass=BlackrockIO,
            filename='FileSpec2.3001',
            directory=self.local_test_dir,
            clean=False),
                              verbose=False)
        block = session.read_block(channels=range(1, 9),
                                   units='all',
                                   nsx_to_load='all',
                                   scaling='raw',
                                   load_waveforms=True,
                                   load_events=True)

        # Check if analog data on channels 1-8 are equal
        self.assertGreater(len(block.channel_indexes), 0)
        for chidx in block.channel_indexes:
            # Should only have one AnalogSignal per ChannelIndex
            self.assertEqual(len(chidx.analogsignals), 1)

            idx = chidx.analogsignals[0].annotations['channel_id']
            if idx in range(1, 9):
                # We ignore the last sample of the Analogsignal returned by the
                # Python implementation, since due to an error in the
                # corresponding matlab loader the last sample was ignored and
                # not saved to the test file
                assert_equal(np.squeeze(chidx.analogsignals[0].base[:-1]),
                             lfp_ml[idx - 1, :])

        # Check if spikes in channels 1,3,5,7 are equal
        self.assertEqual(len(block.segments), 1)
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                assert_equal(st_i.base, matlab_spikes)

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    assert_equal(np.squeeze(st_i.waveforms), wf_ml)

        # Check if digital input port events are equal
        self.assertGreater(len(block.segments[0].events), 0)
        for ea_i in block.segments[0].events:
            if ea_i.name == 'digital_input_port':
                # Get all digital event IDs in this recording
                marker_ids = set(ea_i.labels)
                for marker_id in marker_ids:
                    python_digievents = ea_i.times.base[ea_i.labels ==
                                                        marker_id]
                    matlab_digievents = mts_ml[np.nonzero(
                        mid_ml == int(marker_id))]
                    assert_equal(python_digievents, matlab_digievents)
Example #19
    def test_compare_blackrockio_with_matlabloader(self):
        """
        This test compares the output of ReachGraspIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events on channels
        80-83 and spike waveforms on channel 82, unit 1.
        For details on the file contents, refer to FileSpec2.3.txt
        """

        # Load data from Matlab generated files
        ml = scipy.io.loadmat(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001.mat',
                directory=self.local_test_dir, clean=False))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-3 from original data files using the neo
        # framework
        session = BlackrockIO(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001',
                directory=self.local_test_dir, clean=False),
            verbose=False)
        block = session.read_block(load_waveforms=True)

        # Check if analog data on channels 1-8 are equal
        for rcg_i in block.channel_indexes:
            # Should only have one recording channel per group
            self.assertEqual(rcg_i.size, 1)

            idx = rcg_i[0]
            if idx in range(1, 9):
                assert_equal(rcg_i.analogsignal.base, lfp_ml[idx - 1, :])

        # Should only have one segment
        self.assertEqual(len(block.segments), 1)

        # Check if spikes in channels 1,3,5,7 are equal
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                assert_equal(st_i.base, matlab_spikes)

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    assert_equal(st_i.waveforms, wf_ml)

        # Check if digital marker events are equal
        for ea_i in block.segments[0].events:
            if ('digital_marker' in ea_i.annotations.keys()) and (
                    ea_i.annotations['digital_marker'] is True):
                markerid = ea_i.annotations['marker_id']
                matlab_digievents = mts_ml[np.nonzero(mid_ml == markerid)]
                assert_equal(ea_i.times.base, matlab_digievents)

        # Check if analog marker events are equal
        # Currently not implemented by the Matlab loader
        for ea_i in block.segments[0].events:
            if ('analog_marker' in ea_i.annotations.keys()) and (
                    ea_i.annotations['analog_marker'] is True):
                markerid = ea_i.annotations['marker_id']
                matlab_anaevents = mts_ml[np.nonzero(mid_ml == markerid)]
                assert_equal(ea_i.times.base, matlab_anaevents)
Example #20
    def test_compare_blackrockio_with_matlabloader(self):
        """
        This test compares the output of ReachGraspIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events on channels
        80-83 and spike waveforms on channel 82, unit 1.
        For details on the file contents, refer to FileSpec2.3.txt
        """

        # Load data from Matlab generated files
        ml = scipy.io.loadmat(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001.mat',
                directory=self.local_test_dir, clean=False))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-8 from the original data files using the Neo
        # BlackrockIO
        session = BlackrockIO(
            get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001',
                directory=self.local_test_dir, clean=False),
            verbose=False)
        block = session.read_block(
            channels=range(1, 9), units='all', nsx_to_load='all',
            scaling='raw', load_waveforms=True, load_events=True)

        # Check if analog data on channels 1-8 are equal
        self.assertGreater(len(block.channel_indexes), 0)
        for chidx in block.channel_indexes:
            # Should only have one AnalogSignal per ChannelIndex
            self.assertEqual(len(chidx.analogsignals), 1)

            idx = chidx.analogsignals[0].annotations['channel_id']
            if idx in range(1, 9):
                # We ignore the last sample of the Analogsignal returned by the
                # Python implementation, since due to an error in the
                # corresponding matlab loader the last sample was ignored and
                # not saved to the test file
                assert_equal(np.squeeze(
                    chidx.analogsignals[0].base[:-1]), lfp_ml[idx - 1, :])

        # Check if spikes in channels 1, 3 and 5 are equal
        self.assertEqual(len(block.segments), 1)
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                assert_equal(st_i.base, matlab_spikes)

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    assert_equal(np.squeeze(st_i.waveforms), wf_ml)

        # Check if digital input port events are equal
        self.assertGreater(len(block.segments[0].events), 0)
        for ea_i in block.segments[0].events:
            if ea_i.name == 'digital_input_port':
                # Get all digital event IDs in this recording
                marker_ids = set(ea_i.labels)
                for marker_id in marker_ids:
                    python_digievents = ea_i.times.base[
                        ea_i.labels == marker_id]
                    matlab_digievents = mts_ml[
                        np.nonzero(mid_ml == int(marker_id))]
                    assert_equal(python_digievents, matlab_digievents)
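
The per-label loop above can be condensed into a small grouping helper; a sketch under the assumption that `times` and `labels` are plain arrays of equal length (the helper is illustrative, not part of neo):

    import numpy as np

    def times_by_label(times, labels):
        """Group event times by label, mirroring the digital_input_port
        comparison loop above."""
        times = np.asarray(times)
        labels = np.asarray(labels)
        return {label: times[labels == label] for label in np.unique(labels)}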
Ejemplo n.º 21
    def test_compare_blackrockio_with_matlabloader_V23(self):
        """
        This test compares the output of BlackrockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a .mat
        file created by the script create_data_matlab_blackrock.m.

        The function tests LFPs, spike times, and digital events on channels
        1-8 and spike waveforms on channel 1, unit 0.
        
        For details on the file contents, refer to FileSpec2.3.txt
        """

        # Set to False if any check fails
        allok = True

        # Load data from Matlab generated files
        ml = scipy.io.loadmat(os.path.join(tempfile.gettempdir(), 'files_for_testing_neo', 'blackrock', 'FileSpec2.3001.mat'))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-8 from the original data files using the neo framework
        try:
            session = BlackrockIO(os.path.join(tempfile.gettempdir(), 'files_for_testing_neo', 'blackrock', 'FileSpec2.3001'),
                                        print_diagnostic=False)
            block = session.read_block(n_starts=[None], n_stops=[None],
                                       channel_list=range(1, 9), nsx=5, units=[],
                                       events=True, waveforms=True)
        except Exception:
            allok = False

        # Check if analog data on channels 1-8 are equal
        for rcg_i in block.recordingchannelgroups:
            # Should only have one recording channel per group
            if len(rcg_i.recordingchannels) != 1:
                allok = False

            rc = rcg_i.recordingchannels[0]
            idx = rc.index
            if idx in range(1, 9):
                if np.any(rc.analogsignals[0].base - lfp_ml[idx - 1, :]):
                    allok = False

        # Should only have one segment
        if len(block.segments) != 1:
            allok = False

        # Check if spikes in channels 1, 3 and 5 are equal
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                if np.any(st_i.base - matlab_spikes):
                    allok = False

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    if np.any(st_i.waveforms - wf_ml):
                        allok = False

        # Check if digital marker events are equal
        for ea_i in block.segments[0].eventarrays:
            if 'digital_marker' in ea_i.annotations.keys() and ea_i.annotations['digital_marker'] == True:
                markerid = ea_i.annotations['marker_id']
                matlab_digievents = mts_ml[np.nonzero(mid_ml == markerid)]
                if np.any(ea_i.times.base - matlab_digievents):
                    allok = False

        # Check if analog marker events are equal
        # Currently not implemented by the Matlab loader
        for ea_i in block.segments[0].eventarrays:
            if 'analog_marker' in ea_i.annotations.keys() and ea_i.annotations['analog_marker'] == True:
                markerid = ea_i.annotations['marker_id']
                matlab_anaevents = mts_ml[np.nonzero(mid_ml == markerid)]
                if np.any(ea_i.times.base - matlab_anaevents):
                    allok = False

        # Final result
        self.assertTrue(allok)
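
The `allok` flag in this older example collapses every comparison into a single boolean, so a failure does not report which channel or unit differed. A sketch of the same LFP check written with numpy.testing instead, assuming the per-channel data has already been stripped of units (function and argument names are illustrative):

    import numpy as np
    from numpy.testing import assert_array_equal

    def compare_lfp(analog_by_channel, lfp_ml):
        """Compare per-channel analog data against the (channel x time)
        MATLAB LFP matrix; a failing channel is named in the error message."""
        for channel_id, data in analog_by_channel.items():
            assert_array_equal(np.asarray(data), lfp_ml[channel_id - 1, :],
                               err_msg='mismatch on channel %d' % channel_id)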
Ejemplo n.º 22
0
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """
        filename = self.get_filename_path('FileSpec2.3001')
        reader = BlackrockIO(
            filename=filename,
            verbose=False,
            nsx_to_load=5,
        )

        # Load data to maximum extent (time_slices=None loads the whole recording)
        block = reader.read_block(time_slices=None, load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Loading data using a negative start time or a stop time exceeding
        # the end of the recording should raise an error
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        buggy_slice = (-100 * pq.ms, too_large_tstop)

        # this raises an error in read_block
        with self.assertRaises(ValueError):
            block = reader.read_block(time_slices=[buggy_slice])

        # but this is valid in read_segment because seg_index is specified
        seg = reader.read_segment(seg_index=0, time_slice=buggy_slice)

        lenb = len(seg.analogsignals[0])
        numspb = len(seg.spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # Time slices given explicitly as tuples;
        # verifies identical length of the returned signals given equal
        # durations as input
        ns5_unit = block.segments[0].analogsignals[0].sampling_period
        time_slice = (100 * ns5_unit, 200 * ns5_unit)
        block = reader.read_block(time_slices=[time_slice])
        lena = len(block.segments[0].analogsignals[0])

        time_slice = (100 * ns5_unit, 200 * ns5_unit)
        block = reader.read_block(time_slices=[time_slice])
        lenb = len(block.segments[0].analogsignals[0])

        # Same length?
        self.assertEqual(lena, lenb)
        # Length should be 100 samples exactly
        self.assertEqual(lena, 100)

        # test 4 Units
        time_slices = [(0, 1000 * pq.ms), (3000 * pq.ms, 4000 * pq.ms)]
        block = reader.read_block(time_slices=time_slices,
                                  load_waveforms=True,
                                  units_group_mode='all-in-one')

        self.assertEqual(len(block.segments), 2)
        self.assertEqual(len(block.segments[0].analogsignals), 10)
        self.assertEqual(len(block.channel_indexes[-1].units), 4)
        self.assertEqual(len(block.channel_indexes[-1].units),
                         len(block.segments[0].spiketrains))

        anasig = block.segments[0].analogsignals[0]
        self.assertIsNotNone(anasig.file_origin)
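
For context, the time-slice behaviour exercised above can be reproduced in a few lines; a sketch assuming a neo version whose BlackrockIO.read_segment() accepts seg_index and time_slice as used in the test (the file path is a placeholder):

    import quantities as pq
    from neo.io import BlackrockIO

    # Read only the first 500 ms of segment 0 from a version 2.3 recording.
    reader = BlackrockIO(filename='FileSpec2.3001', nsx_to_load=5)
    seg = reader.read_segment(seg_index=0, time_slice=(0 * pq.ms, 500 * pq.ms))
    print(seg.analogsignals[0].shape, len(seg.spiketrains[0]))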
Ejemplo n.º 23
0
    def test_load_multiple_nsx(self):
        """
        Test if multiple nsx signals can be loaded at the same time.
        """
        filename = self.get_filename_path('blackrock_2_1/l101210-001')
        reader = BlackrockIO(filename=filename, verbose=False, nsx_to_load='all')

        # the number of different sampling rates corresponds to the number of nsx files,
        # because a single nsx contains only signals of identical sampling rate
        block = reader.read_block(load_waveforms=False)
        sampling_rates = np.unique(
            [a.sampling_rate.rescale('Hz') for a in block.filter(objects='AnalogSignal')])
        self.assertEqual(len(sampling_rates), 2)

        segment = reader.read_segment()
        sampling_rates = np.unique(
            [a.sampling_rate.rescale('Hz') for a in segment.filter(objects='AnalogSignal')])
        self.assertEqual(len(sampling_rates), 2)

        # load only ns5
        reader = BlackrockIO(filename=filename, nsx_to_load=5)
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (109224, 96))

        # load only ns2
        reader = BlackrockIO(filename=filename, nsx_to_load=2)
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (3640, 6))

        # load only ns2, given as a list
        reader = BlackrockIO(filename=filename, nsx_to_load=[2])
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)

        # load ns2 + ns5
        reader = BlackrockIO(filename=filename, nsx_to_load=[2, 5])
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 2)
        self.assertEqual(seg.analogsignals[0].shape, (3640, 6))
        self.assertEqual(seg.analogsignals[1].shape, (109224, 96))

        # load only ns5 ('max' selects the highest available nsx, here ns5)
        reader = BlackrockIO(filename=filename, nsx_to_load='max')
        seg = reader.read_segment()
        self.assertEqual(len(seg.analogsignals), 1)
        self.assertEqual(seg.analogsignals[0].shape, (109224, 96))
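
A sketch of the same idea outside a test: load every nsx file of a recording and group the resulting AnalogSignals by sampling rate. It assumes a neo version that accepts nsx_to_load='all', as exercised above; the file path is a placeholder:

    from collections import defaultdict

    from neo.io import BlackrockIO

    reader = BlackrockIO(filename='l101210-001', nsx_to_load='all')
    seg = reader.read_segment()

    # One entry per sampling rate, i.e. roughly one entry per nsx file.
    by_rate = defaultdict(list)
    for sig in seg.analogsignals:
        by_rate[float(sig.sampling_rate.rescale('Hz'))].append(sig)

    for rate, signals in sorted(by_rate.items()):
        print('%.0f Hz: %d signal(s)' % (rate, len(signals)))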
Ejemplo n.º 24
0
    def test_compare_blackrockio_with_matlabloader_v21(self):
        """
        This test compares the output of BlackrockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events.
        """

        dirname = get_test_file_full_path(ioclass=BlackrockIO,
                                          filename='blackrock_2_1/l101210-001',
                                          directory=self.local_test_dir, clean=False)
        # First run with parameters for ns5, then run with correct parameters for ns2
        parameters = [('blackrock_2_1/l101210-001_nev-02_ns5.mat',
                       {'nsx_to_load': 5, 'nev_override': '-'.join([dirname, '02'])}),
                      ('blackrock_2_1/l101210-001.mat', {'nsx_to_load': 2})]
        for index, param in enumerate(parameters):
            # Load data from matlab generated files
            ml = scipy.io.loadmat(
                get_test_file_full_path(
                    ioclass=BlackrockIO,
                    filename=param[0],
                    directory=self.local_test_dir, clean=False))
            lfp_ml = ml['lfp']  # (channel x time) LFP matrix
            ts_ml = ml['ts']  # spike time stamps
            elec_ml = ml['el']  # spike electrodes
            unit_ml = ml['un']  # spike unit IDs
            wf_ml = ml['wf']  # waveforms
            mts_ml = ml['mts']  # marker time stamps
            mid_ml = ml['mid']  # marker IDs

            # Load data from original data files using the Neo BlackrockIO
            session = BlackrockIO(
                dirname,
                verbose=False, **param[1])
            block = session.read_block(load_waveforms=True, signal_group_mode='split-all')
            # Check if analog data are equal
            self.assertGreater(len(block.channel_indexes), 0)
            for i, chidx in enumerate(block.channel_indexes):
                # Break for ChannelIndexes for Units that don't contain any Analogsignals
                if len(chidx.analogsignals) == 0 and len(chidx.units) >= 1:
                    break
                # Should only have one AnalogSignal per ChannelIndex
                self.assertEqual(len(chidx.analogsignals), 1)

                # Find out channel_id in order to compare correctly
                idx = chidx.analogsignals[0].annotations['channel_id']
                # Get data of AnalogSignal without pq.units
                anasig = np.squeeze(chidx.analogsignals[0].base[:].magnitude)
                # Test for equality of first nonzero values of AnalogSignal
                # and matlab file contents.
                # If not equal, test if the hardcoded gain is responsible for this.
                # See BlackrockRawIO ll. 1420 commit 77a645655605ae39eca2de3ee511f3b522f11bd7
                j = 0
                while anasig[j] == 0:
                    j += 1
                if lfp_ml[i, j] != np.squeeze(chidx.analogsignals[0].base[j].magnitude):
                    anasig = anasig / 152.592547
                    anasig = np.round(anasig).astype(int)

                # Special case because id 142 is not included in ns2 file
                if idx == 143:
                    idx -= 1
                if idx > 128:
                    idx = idx - 136

                assert_equal(anasig, lfp_ml[idx - 1, :])

            # Check if spikes are equal
            self.assertEqual(len(block.segments), 1)
            for st_i in block.segments[0].spiketrains:
                channelid = st_i.annotations['channel_id']
                unitid = st_i.annotations['unit_id']

                # Compare waveforms
                matlab_wf = wf_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid)), :][0]
                # np.atleast_2d corrects for waveforms that are saved
                # with a single dimension in the SpikeTrain
                # because only one waveform is available
                assert_equal(np.atleast_2d(np.squeeze(st_i.waveforms).magnitude), matlab_wf)

                # Compare spike timestamps
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                # Make sure that the unit is really seconds and not 1/30000 seconds
                if (not st_i.units == pq.CompoundUnit("1.0/{0} * s".format(30000))) and \
                        st_i.units == pq.s:
                    st_i = np.round(st_i.base * 30000).astype(int)
                assert_equal(st_i, matlab_spikes)

            # Check if digital input port events are equal
            self.assertGreater(len(block.segments[0].events), 0)
            for ea_i in block.segments[0].events:
                if ea_i.name == 'digital_input_port':
                    # Get all digital event IDs in this recording
                    marker_ids = set(ea_i.labels)
                    for marker_id in marker_ids:
                        python_digievents = np.round(
                            ea_i.times.base[ea_i.labels == marker_id] * 30000).astype(int)
                        matlab_digievents = mts_ml[
                            np.nonzero(mid_ml == int(marker_id))]
                        assert_equal(python_digievents, matlab_digievents)
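
The timestamp comparison above rounds spike times to integer samples at 30 kHz before comparing them with the MATLAB indices. A small sketch of that conversion, assuming the SpikeTrain carries quantities units as in the test (the helper itself is illustrative):

    import numpy as np
    import quantities as pq

    def spike_times_to_samples(spiketrain, sampling_rate=30000 * pq.Hz):
        """Convert SpikeTrain timestamps to integer sample indices."""
        seconds = spiketrain.rescale(pq.s).magnitude
        rate_hz = float(sampling_rate.rescale(pq.Hz).magnitude)
        return np.round(seconds * rate_hz).astype(int)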
Ejemplo n.º 25
0
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """

        try:
            b = BlackrockIO(
                get_test_file_full_path(
                    ioclass=BlackrockIO,
                    filename='FileSpec2.3001',
                    directory=self.local_test_dir, clean=False),
                verbose=False)

        except Exception:
            self.fail()

        # Load data to maximum extent; one of the Nones is deliberately not given as a list
        block = b.read_block(
            n_starts=[None], n_stops=None, channels=range(1, 9),
            nsx_to_load=5, units='all', load_events=True,
            load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        block = b.read_block(
            n_starts=[-100 * pq.ms], n_stops=[too_large_tstop],
            channels=range(1, 9), nsx_to_load=[5], units='all',
            load_events=False, load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])
        numspb = len(block.segments[0].spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # n_starts and n_stops not given as list
        # verifies identical length of returned signals given equal durations
        # as input
        ns5_unit = block.segments[0].analogsignals[0].sampling_period
        block = b.read_block(
            n_starts=100 * ns5_unit, n_stops=200 * ns5_unit,
            channels=range(1, 9), nsx_to_load=5, units='all',
            load_events=False, load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])

        block = b.read_block(
            n_starts=301 * ns5_unit, n_stops=401 * ns5_unit,
            channels=range(1, 9), nsx_to_load=5, units='all',
            load_events=False, load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])

        # Same length?
        self.assertEqual(lena, lenb)
        # Length should be 100 samples exactly
        self.assertEqual(lena, 100)

        # Load partial data types and check if this selection is made
        block = b.read_block(
            n_starts=None, n_stops=None, channels=range(1, 9),
            nsx_to_load=5, units='none', load_events=False,
            load_waveforms=True)

        self.assertEqual(len(block.segments), 1)
        self.assertEqual(len(block.segments[0].analogsignals), 8)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 0)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 0)

        # NOTE: channel 6 does not contain any unit
        block = b.read_block(
            n_starts=[None, 3000 * pq.ms], n_stops=[1000 * pq.ms, None],
            channels=range(1, 9), nsx_to_load='none',
            units={1: 0, 5: 0, 6: 0}, load_events=True,
            load_waveforms=True)

        self.assertEqual(len(block.segments), 2)
        self.assertEqual(len(block.segments[0].analogsignals), 0)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 1)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 2)
Ejemplo n.º 26
0
    def test_segment_detection_pause(self):
        """
        This test makes sure segments are detected correctly when pause was used during recording.
        """

        # Path to nev that has spikes that don't fit nsX segment
        filename_nev_outside_seg = self.get_filename_path(
            'segment/PauseSpikesOutside/pause_spikes_outside_seg')
        # Path to nsX and nev that are correct
        filename = self.get_filename_path('segment/PauseCorrect/pause_correct')

        # This issues a warning, because there are spikes a long time after the last segment
        # And another one because there are spikes between segments
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            reader = BlackrockIO(filename=filename,
                                 nsx_to_load=2,
                                 nev_override=filename_nev_outside_seg)
            self.assertGreaterEqual(len(w), 2)

            # Check that warnings are correct
            messages = [
                str(warning.message) for warning in w
                if warning.category == UserWarning
            ]
            self.assertIn('Spikes outside any segment. Detected on segment #1',
                          messages)
            self.assertIn('Spikes 0.0776s after last segment.', messages)

        block = reader.read_block(load_waveforms=False,
                                  signal_group_mode="split-all")

        # 2 segments
        self.assertEqual(len(block.segments), 2)

        # Checking all times are correct as read from file itself
        # (taking neo calculations into account)
        self.assertEqual(block.segments[0].t_start, 0.0)
        # This value is so high, because a spike occurred right before the second segment
        # And thus is added to the first segment
        # This is not normal behavior and occurs because of the way the files were cut
        # into test files
        self.assertAlmostEqual(block.segments[0].t_stop.magnitude, 15.83916667)
        # Clock is not reset
        self.assertEqual(block.segments[1].t_start.magnitude, 31.0087)
        # Segment time is longer here as well because of spikes after second segment
        self.assertEqual(block.segments[1].t_stop.magnitude, 35.0863)
        self.assertEqual(block.segments[0].analogsignals[0].t_start, 0.0)
        # The AnalogSignal is only 4 seconds long, as opposed to the segment
        # whose length is caused by the additional spike
        self.assertEqual(block.segments[0].analogsignals[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].analogsignals[0].t_start, 31.0087)
        self.assertAlmostEqual(
            block.segments[1].analogsignals[0].t_stop.magnitude,
            35.0087,
            places=6)
        self.assertEqual(block.segments[0].spiketrains[0].t_start, 0.0)
        self.assertAlmostEqual(
            block.segments[0].spiketrains[0].t_stop.magnitude,
            15.83916667,
            places=8)
        self.assertEqual(block.segments[1].spiketrains[0].t_start, 31.0087)
        self.assertEqual(block.segments[1].spiketrains[0].t_stop, 35.0863)

        # Each segment has same number of analogsignals
        self.assertEqual(len(block.segments[0].analogsignals),
                         len(block.segments[1].analogsignals))

        # Analogsignals have exactly 4000 samples
        self.assertEqual(len(block.segments[0].analogsignals[0][:]), 4000)
        self.assertEqual(len(block.segments[1].analogsignals[0][:]), 4000)

        # This case is correct, no spikes outside segment or anything
        reader = BlackrockIO(filename=filename, nsx_to_load=2)
        block = reader.read_block(load_waveforms=False,
                                  signal_group_mode="split-all")

        # 2 segments
        self.assertEqual(len(block.segments), 2)

        # Checking all times are correct as read from file itself
        # (taking neo calculations into account)
        self.assertEqual(block.segments[0].t_start, 0.0)
        # Now segment time is only 4 seconds, because there were no additional spikes
        self.assertEqual(block.segments[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].t_start, 31.0087)
        self.assertAlmostEqual(block.segments[1].t_stop.magnitude,
                               35.0087,
                               places=6)
        self.assertEqual(block.segments[0].analogsignals[0].t_start, 0.0)
        self.assertEqual(block.segments[0].analogsignals[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].analogsignals[0].t_start, 31.0087)
        self.assertAlmostEqual(
            block.segments[1].analogsignals[0].t_stop.magnitude,
            35.0087,
            places=6)
        self.assertEqual(block.segments[0].spiketrains[0].t_start, 0.0)
        self.assertEqual(block.segments[0].spiketrains[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].spiketrains[0].t_start, 31.0087)
        self.assertAlmostEqual(
            block.segments[1].spiketrains[0].t_stop.magnitude,
            35.0087,
            places=6)

        # Each segment has same number of analogsignals
        self.assertEqual(len(block.segments[0].analogsignals),
                         len(block.segments[1].analogsignals))

        # ns2 was created in such a way that all analogsignals have 4000 samples
        self.assertEqual(len(block.segments[0].analogsignals[0][:]), 4000)
        self.assertEqual(len(block.segments[1].analogsignals[0][:]), 4000)
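
Outside a test, the segment structure reconstructed for a paused recording can be inspected directly; a sketch assuming the same test files as above (the paths are placeholders):

    from neo.io import BlackrockIO

    reader = BlackrockIO(filename='segment/PauseCorrect/pause_correct', nsx_to_load=2)
    block = reader.read_block(load_waveforms=False)

    # One entry per detected recording segment; the clock is not reset on pause.
    for i, seg in enumerate(block.segments):
        print('segment %d: %s -> %s (%d analog signals, %d spike trains)'
              % (i, seg.t_start, seg.t_stop,
                 len(seg.analogsignals), len(seg.spiketrains)))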
Ejemplo n.º 27
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file
        to check for parsing errors.
        """

        try:
            b = BlackrockIO(get_test_file_full_path(
                ioclass=BlackrockIO,
                filename='FileSpec2.3001',
                directory=self.local_test_dir,
                clean=False),
                            verbose=False)
        except Exception:
            self.fail()

        # Load data to maximum extent; one of the Nones is deliberately not given as a list
        block = b.read_block(n_starts=[None],
                             n_stops=None,
                             channels=range(1, 9),
                             nsx_to_load=5,
                             units='all',
                             load_events=True,
                             load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])
        numspa = len(block.segments[0].spiketrains[0])

        # Load data using a negative time and a time exceeding the end of the
        # recording
        too_large_tstop = block.segments[0].analogsignals[0].t_stop + 1 * pq.s
        block = b.read_block(n_starts=[-100 * pq.ms],
                             n_stops=[too_large_tstop],
                             channels=range(1, 9),
                             nsx_to_load=[5],
                             units='all',
                             load_events=False,
                             load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])
        numspb = len(block.segments[0].spiketrains[0])

        # Same length of analog signal?
        # Both should have read the complete data set!
        self.assertEqual(lena, lenb)

        # Same length of spike train?
        # Both should have read the complete data set!
        self.assertEqual(numspa, numspb)

        # n_starts and n_stops not given as list
        # verifies identical length of returned signals given equal durations
        # as input
        ns5_unit = block.segments[0].analogsignals[0].sampling_period
        block = b.read_block(n_starts=100 * ns5_unit,
                             n_stops=200 * ns5_unit,
                             channels=range(1, 9),
                             nsx_to_load=5,
                             units='all',
                             load_events=False,
                             load_waveforms=False)
        lena = len(block.segments[0].analogsignals[0])

        block = b.read_block(n_starts=301 * ns5_unit,
                             n_stops=401 * ns5_unit,
                             channels=range(1, 9),
                             nsx_to_load=5,
                             units='all',
                             load_events=False,
                             load_waveforms=False)
        lenb = len(block.segments[0].analogsignals[0])

        # Same length?
        self.assertEqual(lena, lenb)
        # Length should be 100 samples exactly
        self.assertEqual(lena, 100)

        # Load partial data types and check if this selection is made
        block = b.read_block(n_starts=None,
                             n_stops=None,
                             channels=range(1, 9),
                             nsx_to_load=5,
                             units='none',
                             load_events=False,
                             load_waveforms=True)

        self.assertEqual(len(block.segments), 1)
        self.assertEqual(len(block.segments[0].analogsignals), 8)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 0)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 0)

        # NOTE: channel 6 does not contain any unit
        block = b.read_block(n_starts=[None, 3000 * pq.ms],
                             n_stops=[1000 * pq.ms, None],
                             channels=range(1, 9),
                             nsx_to_load='none',
                             units={
                                 1: 0,
                                 5: 0,
                                 6: 0
                             },
                             load_events=True,
                             load_waveforms=True)

        self.assertEqual(len(block.segments), 2)
        self.assertEqual(len(block.segments[0].analogsignals), 0)
        self.assertEqual(len(block.channel_indexes), 8)
        self.assertEqual(len(block.channel_indexes[0].units), 1)
        self.assertEqual(len(block.segments[0].events), 0)
        self.assertEqual(len(block.segments[0].spiketrains), 2)
Ejemplo n.º 28
0
    def test_compare_blackrockio_with_matlabloader_V23(self):
        """
        This test compares the output of BlackrockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a .mat
        file created by the script create_data_matlab_blackrock.m.

        The function tests LFPs, spike times, and digital events on channels
        1-8 and spike waveforms on channel 1, unit 0.
        
        For details on the file contents, refer to FileSpec2.3.txt
        """

        # Set to False if any check fails
        allok = True

        # Load data from Matlab generated files
        ml = scipy.io.loadmat(
            os.path.join(tempfile.gettempdir(), 'files_for_testing_neo',
                         'blackrock', 'FileSpec2.3001.mat'))
        lfp_ml = ml['lfp']  # (channel x time) LFP matrix
        ts_ml = ml['ts']  # spike time stamps
        elec_ml = ml['el']  # spike electrodes
        unit_ml = ml['un']  # spike unit IDs
        wf_ml = ml['wf']  # waveform unit 1 channel 1
        mts_ml = ml['mts']  # marker time stamps
        mid_ml = ml['mid']  # marker IDs

        # Load data in channels 1-8 from the original data files using the neo framework
        try:
            session = BlackrockIO(os.path.join(tempfile.gettempdir(),
                                               'files_for_testing_neo',
                                               'blackrock', 'FileSpec2.3001'),
                                  print_diagnostic=False)
            block = session.read_block(n_starts=[None],
                                       n_stops=[None],
                                       channel_list=range(1, 9),
                                       nsx=5,
                                       units=[],
                                       events=True,
                                       waveforms=True)
        except Exception:
            allok = False

        # Check if analog data on channels 1-8 are equal
        for rcg_i in block.recordingchannelgroups:
            # Should only have one recording channel per group
            if len(rcg_i.recordingchannels) != 1:
                allok = False

            rc = rcg_i.recordingchannels[0]
            idx = rc.index
            if idx in range(1, 9):
                if np.any(rc.analogsignals[0].base - lfp_ml[idx - 1, :]):
                    allok = False

        # Should only have one segment
        if len(block.segments) != 1:
            allok = False

        # Check if spikes in channels 1, 3 and 5 are equal
        for st_i in block.segments[0].spiketrains:
            channelid = st_i.annotations['channel_id']
            if channelid in range(1, 7, 2):
                unitid = st_i.annotations['unit_id']
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                if np.any(st_i.base - matlab_spikes):
                    allok = False

                # Check waveforms of channel 1, unit 0
                if channelid == 1 and unitid == 0:
                    if np.any(st_i.waveforms - wf_ml):
                        allok = False

        # Check if digital marker events are equal
        for ea_i in block.segments[0].eventarrays:
            if 'digital_marker' in ea_i.annotations.keys(
            ) and ea_i.annotations['digital_marker'] == True:
                markerid = ea_i.annotations['marker_id']
                matlab_digievents = mts_ml[np.nonzero(mid_ml == markerid)]
                if np.any(ea_i.times.base - matlab_digievents):
                    allok = False

        # Check if analog marker events are equal
        # Currently not implemented by the Matlab loader
        for ea_i in block.segments[0].eventarrays:
            if 'analog_marker' in ea_i.annotations.keys(
            ) and ea_i.annotations['analog_marker'] == True:
                markerid = ea_i.annotations['marker_id']
                matlab_anaevents = mts_ml[np.nonzero(mid_ml == markerid)]
                if np.any(ea_i.times.base - matlab_anaevents):
                    allok = False

        # Final result
        self.assertTrue(allok)
Ejemplo n.º 29
0
    def test_compare_blackrockio_with_matlabloader_v21(self):
        """
        This test compares the output of BlackrockIO.read_block() with the
        output generated by a Matlab implementation of a Blackrock file reader
        provided by the company. The output for comparison is provided in a
        .mat file created by the script create_data_matlab_blackrock.m.
        The function tests LFPs, spike times, and digital events.
        """

        dirname = get_test_file_full_path(ioclass=BlackrockIO,
                                          filename='blackrock_2_1/l101210-001',
                                          directory=self.local_test_dir,
                                          clean=False)
        # First run with parameters for ns5, then run with correct parameters for ns2
        parameters = [('blackrock_2_1/l101210-001_nev-02_ns5.mat', {
            'nsx_to_load': 5,
            'nev_override': '-'.join([dirname, '02'])
        }), ('blackrock_2_1/l101210-001.mat', {
            'nsx_to_load': 2
        })]
        for index, param in enumerate(parameters):
            # Load data from matlab generated files
            ml = scipy.io.loadmat(
                get_test_file_full_path(ioclass=BlackrockIO,
                                        filename=param[0],
                                        directory=self.local_test_dir,
                                        clean=False))
            lfp_ml = ml['lfp']  # (channel x time) LFP matrix
            ts_ml = ml['ts']  # spike time stamps
            elec_ml = ml['el']  # spike electrodes
            unit_ml = ml['un']  # spike unit IDs
            wf_ml = ml['wf']  # waveforms
            mts_ml = ml['mts']  # marker time stamps
            mid_ml = ml['mid']  # marker IDs

            # Load data from original data files using the Neo BlackrockIO
            session = BlackrockIO(dirname, verbose=False, **param[1])
            block = session.read_block(load_waveforms=True,
                                       signal_group_mode='split-all')
            # Check if analog data are equal
            self.assertGreater(len(block.channel_indexes), 0)
            for i, chidx in enumerate(block.channel_indexes):
                # Break for ChannelIndexes for Units that don't contain any Analogsignals
                if len(chidx.analogsignals) == 0 and len(chidx.units) >= 1:
                    break
                # Should only have one AnalogSignal per ChannelIndex
                self.assertEqual(len(chidx.analogsignals), 1)

                # Find out channel_id in order to compare correctly
                idx = chidx.analogsignals[0].annotations['channel_id']
                # Get data of AnalogSignal without pq.units
                anasig = np.squeeze(chidx.analogsignals[0].base[:].magnitude)
                # Test for equality of first nonzero values of AnalogSignal
                # and matlab file contents.
                # If not equal, test if the hardcoded gain is responsible for this.
                # See BlackrockRawIO ll. 1420 commit 77a645655605ae39eca2de3ee511f3b522f11bd7
                j = 0
                while anasig[j] == 0:
                    j += 1
                if lfp_ml[i, j] != np.squeeze(
                        chidx.analogsignals[0].base[j].magnitude):
                    anasig = anasig / 152.592547
                    anasig = np.round(anasig).astype(int)

                # Special case because id 142 is not included in ns2 file
                if idx == 143:
                    idx -= 1
                if idx > 128:
                    idx = idx - 136

                assert_equal(anasig, lfp_ml[idx - 1, :])

            # Check if spikes are equal
            self.assertEqual(len(block.segments), 1)
            for st_i in block.segments[0].spiketrains:
                channelid = st_i.annotations['channel_id']
                unitid = st_i.annotations['unit_id']

                # Compare waveforms
                matlab_wf = wf_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml ==
                                   unitid)), :][0]
                # np.atleast_2d corrects for waveforms that are saved
                # with a single dimension in the SpikeTrain
                # because only one waveform is available
                assert_equal(
                    np.atleast_2d(np.squeeze(st_i.waveforms).magnitude),
                    matlab_wf)

                # Compare spike timestamps
                matlab_spikes = ts_ml[np.nonzero(
                    np.logical_and(elec_ml == channelid, unit_ml == unitid))]
                # Make sure that the unit is really seconds and not 1/30000 seconds
                if (not st_i.units == pq.CompoundUnit("1.0/{0} * s".format(30000))) and \
                        st_i.units == pq.s:
                    st_i = np.round(st_i.base * 30000).astype(int)
                assert_equal(st_i, matlab_spikes)

            # Check if digital input port events are equal
            self.assertGreater(len(block.segments[0].events), 0)
            for ea_i in block.segments[0].events:
                if ea_i.name == 'digital_input_port':
                    # Get all digital event IDs in this recording
                    marker_ids = set(ea_i.labels)
                    for marker_id in marker_ids:
                        python_digievents = np.round(
                            ea_i.times.base[ea_i.labels == marker_id] *
                            30000).astype(int)
                        matlab_digievents = mts_ml[np.nonzero(
                            mid_ml == int(marker_id))]
                        assert_equal(python_digievents, matlab_digievents)
Ejemplo n.º 30
0
    def test_inputs_V23(self):
        """
        Test various inputs to BlackrockIO.read_block with version 2.3 file to check for parsing errors.
        """

        # Set to False if any check fails
        allok = True

        try:
            b = BlackrockIO(os.path.join(tempfile.gettempdir(),
                                         'files_for_testing_neo', 'blackrock',
                                         'FileSpec2.3001'),
                            print_diagnostic=False)

            # Load data to maximum extent; one of the Nones is deliberately not given as a list
            block = b.read_block(n_starts=[None],
                                 n_stops=None,
                                 channel_list=range(1, 9),
                                 nsx=5,
                                 units=[],
                                 events=True,
                                 waveforms=False)
            lena = len(block.segments[0].analogsignals[0])
            numspa = len(block.segments[0].spiketrains[0])

            # Load data with very long extent using a negative time and the get_max_time() method
            block = b.read_block(n_starts=[-100 * pq.ms],
                                 n_stops=[b.get_max_time()],
                                 channel_list=range(1, 9),
                                 nsx=[5],
                                 units=[],
                                 events=False,
                                 waveforms=False)
            lenb = len(block.segments[0].analogsignals[0])
            numspb = len(block.segments[0].spiketrains[0])

            # Same length of analog signal? Both should have read the complete data set!
            if lena != lenb:
                allok = False
            # Same length of spike train? Both should have read the complete data set!
            if numspa != numspb:
                allok = False

            # Load data with very long extent, n_starts and n_stops not given as list
            block = b.read_block(n_starts=100 * b.nsx_unit[5],
                                 n_stops=200 * b.nsx_unit[5],
                                 channel_list=range(1, 9),
                                 nsx=5,
                                 units=[],
                                 events=False,
                                 waveforms=False)
            lena = len(block.segments[0].analogsignals[0])

            block = b.read_block(n_starts=301 * b.nsx_unit[5],
                                 n_stops=401 * b.nsx_unit[5],
                                 channel_list=range(1, 9),
                                 nsx=5,
                                 units=[],
                                 events=False,
                                 waveforms=False)
            lenb = len(block.segments[0].analogsignals[0])

            # Same length?
            if lena != lenb:
                allok = False

            # Length should be 100 samples exactly
            if lena != 100:
                allok = False

            # Load partial data types and check if this selection is made
            block = b.read_block(n_starts=None,
                                 n_stops=None,
                                 channel_list=range(1, 9),
                                 nsx=5,
                                 units=None,
                                 events=False,
                                 waveforms=True)
            if len(block.segments) != 1:
                allok = False
            if len(block.segments[0].analogsignals) != 8:
                allok = False
            if len(block.recordingchannelgroups) != 8:
                allok = False
            if len(block.recordingchannelgroups[0].units) != 0:
                allok = False
            if len(block.segments[0].eventarrays) != 0:
                allok = False
            if len(block.segments[0].spiketrains) != 0:
                allok = False

            block = b.read_block(n_starts=[None, 3000 * pq.ms],
                                 n_stops=[1000 * pq.ms, None],
                                 channel_list=range(1, 9),
                                 nsx=None,
                                 units={
                                     1: 0,
                                     5: 0,
                                     6: 0
                                 },
                                 events=True,
                                 waveforms=True)
            if len(block.segments) != 2:
                allok = False
            if len(block.segments[0].analogsignals) != 0:
                allok = False
            if len(block.recordingchannelgroups) != 8:
                allok = False
            if len(block.recordingchannelgroups[0].units) != 1:
                allok = False
            # if len(block.recordingchannelgroups[4].units) != 0:  # only one of two neurons on channel 78, and only one unit for two segments!
            #    allok = False
            if len(block.segments[0].eventarrays) == 0:
                allok = False
            if len(block.segments[0].spiketrains[0].waveforms) == 0:
                allok = False

        except Exception:
            allok = False

        self.assertTrue(allok)
Ejemplo n.º 31
0
    def test_load_waveforms(self):
        filename = self.get_filename_path('FileSpec2.3001')
        reader = BlackrockIO(filename=filename, verbose=False)

        bl = reader.read_block(load_waveforms=True)
        assert_neo_object_is_compliant(bl)
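
To go one step beyond the compliance check, the waveforms loaded by read_block(load_waveforms=True) can be inspected per spike train; a sketch assuming a neo version where SpikeTrain.waveforms is a (spikes x channels x samples) quantity array (the file name is a placeholder):

    from neo.io import BlackrockIO

    reader = BlackrockIO(filename='FileSpec2.3001')
    block = reader.read_block(load_waveforms=True)

    # Report the waveform array shape for every spike train that has one.
    for st in block.segments[0].spiketrains:
        if st.waveforms is not None:
            print(st.annotations.get('channel_id'),
                  st.annotations.get('unit_id'),
                  st.waveforms.shape)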
Ejemplo n.º 32
0
    def test_segment_detection_pause(self):
        """
        This test makes sure segments are detected correctly when pause was used during recording.
        """

        # Path to nev that has spikes that don't fit nsX segment
        filename_nev_outside_seg = self.get_filename_path(
            'segment/PauseSpikesOutside/pause_spikes_outside_seg')
        # Path to nsX and nev that are correct
        filename = self.get_filename_path('segment/PauseCorrect/pause_correct')

        # This issues a warning, because there are spikes a long time after the last segment
        # And another one because there are spikes between segments
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            reader = BlackrockIO(filename=filename, nsx_to_load=2,
                                 nev_override=filename_nev_outside_seg)
            self.assertGreaterEqual(len(w), 2)

            # Check that warnings are correct
            messages = [str(warning.message) for warning in w if warning.category == UserWarning]
            self.assertIn('Spikes outside any segment. Detected on segment #1', messages)
            self.assertIn('Spikes 0.0776s after last segment.', messages)

        block = reader.read_block(load_waveforms=False, signal_group_mode="split-all")

        # 2 segments
        self.assertEqual(len(block.segments), 2)

        # Checking all times are correct as read from file itself
        # (taking neo calculations into account)
        self.assertEqual(block.segments[0].t_start, 0.0)
        # This value is so high, because a spike occurred right before the second segment
        # And thus is added to the first segment
        # This is not normal behavior and occurs because of the way the files were cut
        # into test files
        self.assertAlmostEqual(block.segments[0].t_stop.magnitude, 15.83916667)
        # Clock is not reset
        self.assertEqual(block.segments[1].t_start.magnitude, 31.0087)
        # Segment time is longer here as well because of spikes after second segment
        self.assertEqual(block.segments[1].t_stop.magnitude, 35.0863)
        self.assertEqual(block.segments[0].analogsignals[0].t_start, 0.0)
        # The AnalogSignal is only 4 seconds long, as opposed to the segment
        # whose length is caused by the additional spike
        self.assertEqual(block.segments[0].analogsignals[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].analogsignals[0].t_start, 31.0087)
        self.assertAlmostEqual(block.segments[1].analogsignals[0].t_stop.magnitude, 35.0087,
                               places=6)
        self.assertEqual(block.segments[0].spiketrains[0].t_start, 0.0)
        self.assertAlmostEqual(block.segments[0].spiketrains[0].t_stop.magnitude, 15.83916667,
                               places=8)
        self.assertEqual(block.segments[1].spiketrains[0].t_start, 31.0087)
        self.assertEqual(block.segments[1].spiketrains[0].t_stop, 35.0863)

        # Each segment has same number of analogsignals
        self.assertEqual(len(block.segments[0].analogsignals),
                         len(block.segments[1].analogsignals))

        # Analogsignals have exactly 4000 samples
        self.assertEqual(len(block.segments[0].analogsignals[0][:]), 4000)
        self.assertEqual(len(block.segments[1].analogsignals[0][:]), 4000)

        # This case is correct, no spikes outside segment or anything
        reader = BlackrockIO(filename=filename, nsx_to_load=2)
        block = reader.read_block(load_waveforms=False, signal_group_mode="split-all")

        # 2 segments
        self.assertEqual(len(block.segments), 2)

        # Checking all times are correct as read from file itself
        # (taking neo calculations into account)
        self.assertEqual(block.segments[0].t_start, 0.0)
        # Now segment time is only 4 seconds, because there were no additional spikes
        self.assertEqual(block.segments[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].t_start, 31.0087)
        self.assertAlmostEqual(block.segments[1].t_stop.magnitude, 35.0087, places=6)
        self.assertEqual(block.segments[0].analogsignals[0].t_start, 0.0)
        self.assertEqual(block.segments[0].analogsignals[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].analogsignals[0].t_start, 31.0087)
        self.assertAlmostEqual(block.segments[1].analogsignals[0].t_stop.magnitude, 35.0087,
                               places=6)
        self.assertEqual(block.segments[0].spiketrains[0].t_start, 0.0)
        self.assertEqual(block.segments[0].spiketrains[0].t_stop, 4.0)
        self.assertEqual(block.segments[1].spiketrains[0].t_start, 31.0087)
        self.assertAlmostEqual(block.segments[1].spiketrains[0].t_stop.magnitude, 35.0087,
                               places=6)

        # Each segment has same number of analogsignals
        self.assertEqual(len(block.segments[0].analogsignals),
                         len(block.segments[1].analogsignals))

        # ns2 was created in such a way that all analogsignals have 4000 samples
        self.assertEqual(len(block.segments[0].analogsignals[0][:]), 4000)
        self.assertEqual(len(block.segments[1].analogsignals[0][:]), 4000)
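
Finally, the warning-capture pattern used in the segment tests is useful on its own when a nev file does not match its nsx counterpart; a sketch assuming the placeholder paths below and a neo version that emits UserWarnings for spikes outside segments, as asserted above:

    import warnings

    from neo.io import BlackrockIO

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        reader = BlackrockIO(filename='pause_correct', nsx_to_load=2,
                             nev_override='pause_spikes_outside_seg')

    # Report anything neo complained about while matching spikes to segments.
    for w in caught:
        if issubclass(w.category, UserWarning):
            print(w.message)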