Beispiel #1
0
def copy(kfile, afile, datatypes=(0,)):
    """Copy the contents of a .kwd (kwik) hdf5 file to a .arf hdf5 file.

    kfile: open h5py handle of the source .kwd file
    afile: open h5py handle of the destination .arf file
    datatypes: sequence of arf datatype codes; either one per channel, or a
        single-element sequence broadcast to every channel (0 = undefined).
        NOTE: the previous default of ``0`` crashed on ``len(datatypes)``;
        a one-element tuple preserves the broadcast intent without the crash.

    Raises ValueError if datatypes has neither 1 nor one-per-channel entries.
    """
    # copy top level attributes
    for k, v in kfile.attrs.items():
        afile.attrs[k] = v

    # descend into /recordings, which holds the "entries" named "0", "1", ...
    recordings = kfile["recordings"]
    for kname, kentry in recordings.items():
        timestamp = 0  # TODO determine correct timestamp
        e = arf.create_entry(afile, kname, timestamp)
        for k, v in kentry.attrs.items():
            e.attrs[k] = v
        kdata = kentry["data"]
        n_channels = kdata.shape[1]
        # broadcast a single datatype to all channels; otherwise require
        # exactly one per channel (ValueError survives `python -O`, unlike
        # the previous assert)
        if len(datatypes) == 1:
            datatypes = tuple(datatypes) * n_channels
        elif len(datatypes) != n_channels:
            raise ValueError("datatypes must have exactly one entry per channel")
        channel_bit_volts = kentry["application_data"].attrs["channel_bit_volts"]
        channel_sample_rates = kentry["application_data"].attrs["channel_sample_rates"]
        # kwik files are SxN datasets, while in arf it's N datasets of length S
        for i in range(n_channels):
            dset = arf.create_dataset(e, name=str(i), data=np.array([], dtype=np.int16),
                                      maxshape=(kdata.shape[0],),
                                      sampling_rate=channel_sample_rates[i],
                                      units='samples', datatype=datatypes[i],
                                      compression=6)
            dset.attrs["bit_volts"] = channel_bit_volts[i]
            # copy column i in BUFFERSIZE-sample slabs to bound memory use
            for j in range(int(kdata.shape[0] / BUFFERSIZE) + 1):
                index = j * BUFFERSIZE
                arf.append_data(dset, kdata[index:index + BUFFERSIZE, i])
Beispiel #2
0
def test07_append_to_table():
    """A freshly created table is empty; appending one record grows it to one row."""
    fp = arf.open_file("test07", mode="a", driver="core", backing_store=False)
    record_type = nx.dtype([("f1", nx.uint), ("f2", nx.int32)])
    table = arf.create_table(fp, 'test', dtype=record_type)
    assert_equal(table.shape[0], 0)
    arf.append_data(table, (5, 10))
    assert_equal(table.shape[0], 1)
Beispiel #3
0
    def add_label(self, name, start, stop):
        """Append a (name, start, stop) label to the label dataset and re-plot.

        Returns the index of the new label in the sorted dataset, or None
        when the label cannot be added (user declined to replace a
        non-extensible dataset, or the file is open read-only).
        """
        # An extensible h5py dataset has maxshape (None,); anything else
        # cannot grow, so offer to replace it with an extensible copy.
        if self.lbl.maxshape != (None,): #if dataset is extensible
            win = self.parentWidget().getViewWidget()        
            reply = QMessageBox.question(win,"", "Label cannot be added because dataset is not extensible. Replace with extensible dataset?", QMessageBox.No | QMessageBox.Yes, QMessageBox.No)
            self.key = None  #because keyReleaseEvent may be ignored during message box display                
            if reply == QMessageBox.Yes:
                lbl = self.file[self.path] 
                # copy the old data into a new dataset with maxshape=(None,)
                self.lbl = replace_dataset(lbl, lbl.parent, data=lbl[:], maxshape=(None,))
            else:
                return 

        # NOTE(review): this branch is only reached when the dataset IS
        # extensible — a read-only file with a non-extensible dataset still
        # attempts replace_dataset above; confirm that is intended.
        elif self.file.mode == 'r':
            win = self.parentWidget().getViewWidget()
            QMessageBox.critical(win,"", "Cannot add label. Make sure you have write permission for this file.", QMessageBox.Ok)
            self.key = None
            return 

        arf.append_data(self.lbl,(name,start,stop))
        self.double_clicked = np.append(self.double_clicked,False)
        # sort_lbl returns the permutation applied; the appended row was at
        # the old last index, so find where that index landed after sorting
        sort_idx = self.sort_lbl()
        new_idx = np.argmax(sort_idx==(len(self.lbl)-1))
        self.plot_all_events()

        return new_idx
Beispiel #4
0
def test07_append_to_table():
    """create_table starts with zero rows; append_data adds exactly one."""
    handle = arf.open_file("test07", mode="a", driver="core", backing_store=False)
    fields = {'names': ("f1", "f2"), 'formats': [nx.uint, nx.int32]}
    tbl = arf.create_table(handle, 'test', dtype=nx.dtype(fields))
    assert_equal(len(tbl), 0)
    arf.append_data(tbl, (5, 10))
    assert_equal(len(tbl), 1)
Beispiel #5
0
    def save(stream, filename, path, sampling_rate=None, chunk_size=None):
        """Save a Stream object to an .arf file.

        Can't be called by an instance of ArfStreamer.

        stream: source stream; ``stream.read(chunk_size)`` must return a sized
            chunk, with an empty result signalling end of stream
        filename: .arf file to open (mode 'a': created or appended)
        path: HDF5 path of the destination dataset, e.g. "/group/dataset"
        sampling_rate: required; a missing value raises ValueError
        chunk_size: samples per read; defaults to ``stream.chunk_size``

        Raises ValueError when sampling_rate is omitted or the dataset
        cannot be created (e.g. a dataset with that name already exists).
        """
        # `is None` instead of `== None`: identity test for the sentinel
        if chunk_size is None:
            chunk_size = stream.chunk_size

        if sampling_rate is None:
            # ValueError (a subclass of Exception) replaces the previous bare
            # Exception, so existing broad handlers still match
            raise ValueError("You must specify the sampling rate in ArfStreamer.save")

        with arf.open_file(filename, 'a') as file:
            parts = path.split("/")
            dst_name = parts[-1]
            grp_path = "/".join(parts[:-1])
            grp = file.require_group(grp_path)
            # first batch of data creates the (extensible) dataset
            data = stream.read(chunk_size)
            try:
                dst = arf.create_dataset(grp, dst_name, data,
                    maxshape=(None,), sampling_rate=sampling_rate)
            except Exception as err:
                # chain the underlying error instead of discarding it with a
                # bare `except:` — the traceback now shows the real cause
                raise ValueError('Error, maybe dataset with that name already exists') from err
            # stream the remaining batches until the source is exhausted
            while True:
                data = stream.read(chunk_size)
                if len(data) == 0:
                    break
                arf.append_data(dst, data)
            file.flush()
Beispiel #6
0
def match_stimuli(stimuli, entries, sampling_rate, table_name='stimuli'):
    """
    Create labels in arf entries that indicate stimulus onset and
    offset.  As the explog (or equivalent logfile) is parsed, the
    onset times of the entries and stimuli are collected.  Based on
    these times, this function matches each item in the list of
    stimuli to an entry.

    stimuli: dictionary of onset, stimulus_name pairs
    entries: dictionary of onset, arf entry pairs
    sampling_rate: the sampling rate of the onset times
    table_name:  the name of the node to store the label data in
    """
    log.debug("Matching stimuli to entries:")
    entry_times = nx.sort(list(entries.keys()))
    # slow, but simple
    for onset in sorted(stimuli.keys()):
        stim = stimuli[onset]
        # index of the last entry whose onset is <= the stimulus onset
        idx = entry_times.searchsorted(onset, side='right') - 1
        # fixed bounds check: the upper test was `idx > entry_times.size`,
        # which can never fire (idx <= size - 1); `>=` is the correct
        # out-of-range condition
        if idx < 0 or idx >= entry_times.size:
            log.debug("%s (onset=%d) -> no match!", stim, onset)
            continue

        eonset = entry_times[idx]
        entry = entries[eonset]

        units = 'samples'
        # stimulus onset relative to the start of the matched entry
        t_onset = onset - eonset

        # check that stim isn't occurring after the end of the recording
        max_length = max(dset.size for dset in entry.values())
        if t_onset >= max_length:
            log.debug("%s (onset=%d) -> after end of last entry", stim, onset)
            continue
        log.debug("%s (onset=%d) -> %s @ %d samples",
                  stim, onset, entry.name, t_onset)

        # add to list of intervals. this is trickier in h5py
        if table_name not in entry:
            stimtable = arf.create_table(
                entry, table_name, event_dtype,
                sampling_rate=sampling_rate,
                units=units, datatype=arf.DataTypes.EVENT)
            arf.set_uuid(
                stimtable, get_uuid(entry.attrs['pen'], entry.attrs['site'], table_name))
        else:
            stimtable = entry[table_name]
        arf.append_data(stimtable, (t_onset, 0x00, stim))
        entry.attrs['protocol'] = stim
Beispiel #7
0
    def mouseClickEvent(self, event):
        """Handle left-clicks: start a new label, or finish the active one.

        With a key held (self.key) and no active label, a zero-length label
        (key, t, t) is appended at the click time.  With an active label, the
        click sets the label's other boundary.  Times are converted from view
        coordinates via self.scaling_factor.
        """
        if event.button() == Qt.LeftButton:
            # map the scene click position into data coordinates
            pos=self.getViewBox().mapSceneToView(event.scenePos())
            t = pos.x() * self.scaling_factor
            if self.key and not self.activeLabel:
                # begin a new label: start == stop == click time
                arf.append_data(self.lbl, (self.key, t, t))
                self.activeLabel = self.plot_complex_event(self.lbl[-1]) 
                # Shift keeps the label active so a second click can extend it
                if event.modifiers() != Qt.ShiftModifier:
                    self.sort_lbl()
                    self.activeLabel = None
            elif self.activeLabel:
                # second click: extend the last label toward the click,
                # keeping start <= stop
                if t >= self.lbl[-1]['start']:
                    self.lbl[-1] = (self.lbl[-1]['name'], self.lbl[-1]['start'], t)
                else:
                    self.lbl[-1] =  (self.lbl[-1]['name'], t, self.lbl[-1]['stop'])

                # convert back to view coordinates for the on-screen region
                new_region = (np.array([self.lbl[-1]['start'], self.lbl[-1]['stop']])/ 
                              self.scaling_factor)

                self.activeLabel.setRegion(new_region)
                self.activeLabel = None
                self.sort_lbl()
Beispiel #8
0
def test07_append_to_table():
    """Appending a record to a new in-memory table grows it from 0 to 1 row."""
    fp = arf.open_file("test07", mode="a", driver="core", backing_store=False)
    row_dtype = nx.dtype([('f1', nx.uint), ('f2', nx.int32)])
    table = arf.create_table(fp, 'test', dtype=row_dtype)
    assert_equal(table.shape[0], 0)
    arf.append_data(table, (5, 10))
    assert_equal(table.shape[0], 1)