Example #1
0
    def test_find_epochs(self):
        """find_epochs on the large reference file should recover the
        known (state -> FrameSlice) epoch table."""
        global test_file_large
        path = os.path.join('data', test_file_large[:-4] + '.hdf5')

        daq = Daq()
        daq.read_hd5(path)
        stream = daq['SCC_LogStreams'][0, :]

        # (state, start, stop) triples hand-verified against the large file
        bounds = [
            (1,  313314, 313826),
            (2,  313826, 317218),
            (3,  317218, 317734),
            (11, 336734, 337242),
            (12, 337242, 340658),
            (13, 340658, 341198),
            (21, 357834, 358330),
            (22, 358330, 361818),
            (23, 361818, 362362),
            (31, 381626, 382126),
            (32, 382126, 385446),
            (33, 385446, 385918),
            (41, 407334, 407814),
            (42, 407814, 411238),
            (43, 411238, 411746),
        ]
        expected = OrderedDict(
            (state, FrameSlice(start=a, stop=b, step=None))
            for (state, a, b) in bounds)

        epochs = logstream.find_epochs(stream)

        self.assertEqual(epochs, expected)
Example #2
0
    def test_find_epochs(self):
        """Check logstream.find_epochs against the hand-verified epoch
        table for the large test file."""
        global test_file_large
        h5path = os.path.join('data', test_file_large[:-4] + '.hdf5')

        daq = Daq()
        daq.read_hd5(h5path)
        stream0 = daq['SCC_LogStreams'][0, :]

        # small shorthand so each expected entry fits on one line
        def fslice(begin, end):
            return FrameSlice(start=begin, stop=end, step=None)

        expected = OrderedDict([
            (1,  fslice(313314, 313826)),
            (2,  fslice(313826, 317218)),
            (3,  fslice(317218, 317734)),
            (11, fslice(336734, 337242)),
            (12, fslice(337242, 340658)),
            (13, fslice(340658, 341198)),
            (21, fslice(357834, 358330)),
            (22, fslice(358330, 361818)),
            (23, fslice(361818, 362362)),
            (31, fslice(381626, 382126)),
            (32, fslice(382126, 385446)),
            (33, fslice(385446, 385918)),
            (41, fslice(407334, 407814)),
            (42, fslice(407814, 411238)),
            (43, fslice(411238, 411746)),
        ])

        self.assertEqual(logstream.find_epochs(stream0), expected)
Example #3
0
def attach_metadata(hd5_file):
    """
    Build and attach analysis metadata to a Daq hd5 file, in place.

    The Daq files have an 'etc' dict attribute intended for users to
    store analysis relevant metadata. The etc dict can be exported to hdf5
    and reloaded from hdf5. We want to go through and build these dicts so
    that the information is at our fingertips when we need it.

    Parameters
    ----------
    hd5_file : str
        Backslash-separated relative path to the hd5 file. The first path
        component is expected to be '<4-char prefix><pid>' (e.g.
        'Part07\\...') -- the participant id is parsed from it.

    Returns
    -------
    float
        Elapsed wall-clock seconds spent in this call.
    """
    global latin_square, wind_speeds

    t0 = time.time()

    tmp_file = hd5_file + '.tmp'

    # load hd5
    daq = Daq()
    daq.read_hd5(hd5_file)

    etc = {}  # fill and pack in daq

    # find the real participant id (pid) from the file path.
    # NOTE(review): assumes Windows-style '\\' separators and a 4-char
    # prefix on the first component -- confirm against the callers.
    etc['pid'] = int(hd5_file.split('\\')[0][4:])
    etc['scen_order'] = latin_square[(etc['pid'] - 1) % 10]
    etc['wind_speeds'] = wind_speeds

    # now to find the epochs
    # epochs is a dictionary. The keys are the enumerated states and the
    # values are FrameSlice objects. The FrameSlice objects can be used
    # to slice the Elements in Daq instances.
    etc['epochs'] = find_epochs(daq['SCC_LogStreams'][0, :])

    daq.etc = etc  # doing it this way ensures we know what is in there

    # write to a temporary file, then atomically swap it into place.
    # os.replace overwrites the destination in a single atomic step, so
    # there is no window in which the original has been deleted but the
    # replacement not yet renamed (the old remove+rename pair had exactly
    # that window). This protects you from yourself: if you get impatient
    # and kill the kernel there is far less risk of losing your hd5.
    daq.write_hd5(tmp_file)
    os.replace(tmp_file, hd5_file)

    del daq

    return time.time() - t0