Example #1
    def test0(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        self.assertFalse(daq['VDS_Veh_Speed'].isCSSDC())
Example #2
    def test_find_epochs(self):
        global test_file_large
        hdf5file = test_file_large[:-4] + '.hdf5'
        hdf5file = os.path.join('data', hdf5file)
        daq = Daq()
        daq.read_hd5(hdf5file)
        logstream0 = daq['SCC_LogStreams'][0, :]

        rs = OrderedDict([
            (1, FrameSlice(start=313314, stop=313826, step=None)),
            (2, FrameSlice(start=313826, stop=317218, step=None)),
            (3, FrameSlice(start=317218, stop=317734, step=None)),
            (11, FrameSlice(start=336734, stop=337242, step=None)),
            (12, FrameSlice(start=337242, stop=340658, step=None)),
            (13, FrameSlice(start=340658, stop=341198, step=None)),
            (21, FrameSlice(start=357834, stop=358330, step=None)),
            (22, FrameSlice(start=358330, stop=361818, step=None)),
            (23, FrameSlice(start=361818, stop=362362, step=None)),
            (31, FrameSlice(start=381626, stop=382126, step=None)),
            (32, FrameSlice(start=382126, stop=385446, step=None)),
            (33, FrameSlice(start=385446, stop=385918, step=None)),
            (41, FrameSlice(start=407334, stop=407814, step=None)),
            (42, FrameSlice(start=407814, stop=411238, step=None)),
            (43, FrameSlice(start=411238, stop=411746, step=None))
        ])

        epochs = logstream.find_epochs(logstream0)

        self.assertEqual(epochs, rs)
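
The FrameSlice values that find_epochs returns address frames, not array positions, and (as the comments in Example #29 note) they can be used to slice the Elements of a Daq instance. A minimal, hypothetical sketch of that usage, assuming a 'data/example.hdf5' file and that fslice and the logstream module are importable from undaqTools, as their use in these tests suggests:

import os
from undaqTools import Daq, fslice, logstream

daq = Daq()
daq.read_hd5(os.path.join('data', 'example.hdf5'))  # hypothetical file

# enumerate the epochs coded on logstream 0
epochs = logstream.find_epochs(daq['SCC_LogStreams'][0, :])

# each value is a FrameSlice; its start/stop are frame numbers and can be
# fed back into a frame-based slice of any other element
for epoch, fs in epochs.items():
    speed = daq['VDS_Veh_Speed'][0, fslice(fs.start, fs.stop)]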
Example #3
    def test1(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        self.assertTrue(daq['TPR_Tire_Surf_Type'].isCSSDC())
Example #4
    def test0(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        self.assertFalse(daq['VDS_Veh_Speed'].isCSSDC())
Example #5
    def test1(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        self.assertTrue(daq['TPR_Tire_Surf_Type'].isCSSDC())
Example #6
    def test_load_with_wcelemlist(self):
        global test_file
        rs = ['VDS_Veh_Dynamic_Pres',
              'SCC_DynObj_CvedId',
              'VDS_Veh_Eng_RPM',
              'SCC_DynObj_RollPitch',
              'SCC_DynObj_ColorIndex',
              'VDS_Veh_Eng_Torque',
              'SCC_DynObj_AudioVisualState',
              'SCC_DynObj_Vel',
              'SCC_DynObj_DataSize',
              'VDS_Veh_Dist',
              'VDS_Veh_Heading',
              'SCC_DynObj_HcsmType',
              'SCC_DynObj_SolId',
              'SCC_DynObj_Pos',
              'VDS_Veh_Speed',
              'SCC_DynObj_Heading',
              'SCC_DynObj_Name']

        daq = Daq()
        daq.read(os.path.join('data', test_file),
                 elemlist=['VDS_Veh*', 'SCC_Dyn*'])

        assert_array_equal(rs, daq.keys())
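
The element list can also be loaded from a plain-text file (presumably one name or pattern per line, given how Example #31 reads its elemlist) instead of being passed inline; the other tests here do that with load_elemlist_fromfile. A minimal sketch, with 'example.daq' standing in for a real file:

import os
from undaqTools import Daq

daq = Daq()

# element list read from elemList2.txt (the file the other tests use)
# rather than passed as elemlist=[...] to read()
daq.load_elemlist_fromfile('elemList2.txt')
daq.read(os.path.join('data', 'example.daq'))  # hypothetical .daq file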
Example #7
    def test_find_epochs(self):
        global test_file_large
        hdf5file = test_file_large[:-4] + '.hdf5'
        hdf5file = os.path.join('data', hdf5file)
        daq = Daq()
        daq.read_hd5(hdf5file)
        logstream0 = daq['SCC_LogStreams'][0, :]

        rs = OrderedDict([
            (1, FrameSlice(start=313314, stop=313826, step=None)),
            (2, FrameSlice(start=313826, stop=317218, step=None)),
            (3, FrameSlice(start=317218, stop=317734, step=None)),
            (11, FrameSlice(start=336734, stop=337242, step=None)),
            (12, FrameSlice(start=337242, stop=340658, step=None)),
            (13, FrameSlice(start=340658, stop=341198, step=None)),
            (21, FrameSlice(start=357834, stop=358330, step=None)),
            (22, FrameSlice(start=358330, stop=361818, step=None)),
            (23, FrameSlice(start=361818, stop=362362, step=None)),
            (31, FrameSlice(start=381626, stop=382126, step=None)),
            (32, FrameSlice(start=382126, stop=385446, step=None)),
            (33, FrameSlice(start=385446, stop=385918, step=None)),
            (41, FrameSlice(start=407334, stop=407814, step=None)),
            (42, FrameSlice(start=407814, stop=411238, step=None)),
            (43, FrameSlice(start=411238, stop=411746, step=None))
        ])

        epochs = logstream.find_epochs(logstream0)

        self.assertEqual(epochs, rs)
Example #8
    def test_load(self):
        global test_file
        
        daq = Daq()
        daq.read(os.path.join('data', test_file))

        old_daq = old_convert_daq.read_file(os.path.join('data', test_file))
        self.__assert_old_daq_equals_daq(old_daq, daq)
Example #9
    def test0(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['VDS_Veh_Speed'].toarray()
        self.assertFalse(isinstance(ds, Element))
        self.assertEqual(ds.shape, (1L, 10658L))
Example #10
    def test8(self):
        """row indx int"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['TPR_Tire_Surf_Type'][4, findex(5840)]
        self.assertEqual(0, ds)
Example #11
    def test8(self):
        """row indx int"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['TPR_Tire_Surf_Type'][4, findex(5840)]
        self.assertEqual(0, ds)
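
These slicing tests rely on frame-based indexing: findex(f) addresses the single frame f and fslice(f0, fend) addresses a frame range (compare Example #21 and the plotting code near the end of this collection), rather than plain array positions. A minimal sketch, with 'data/example.hdf5' hypothetical and fslice/findex assumed importable from undaqTools as their use here suggests:

import os
from undaqTools import Daq, fslice, findex

daq = Daq()
daq.read_hd5(os.path.join('data', 'example.hdf5'))  # hypothetical file

# value of every row at frame 5840 (a frame number, not an array index)
surf = daq['TPR_Tire_Surf_Type'][:, findex(5840)]

# first row over the frame range 5840 to 6000
speed = daq['VDS_Veh_Speed'][0, fslice(5840, 6000)]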
Example #12
    def test0(self):
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['VDS_Veh_Speed'].toarray()
        self.assertFalse(isinstance(ds, Element))
        self.assertEqual(ds.shape, (1L, 10658L))
Example #13
    def test1(self):
        """frame less than element.frames[0]"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['TPR_Tire_Surf_Type'][:, findex(0)]

        self.assertTrue(np.isnan(ds))
Example #14
    def test1(self):
        """frame less than element.frames[0]"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        ds = daq['TPR_Tire_Surf_Type'][:, findex(0)]

        self.assertTrue(np.isnan(ds))
Example #15
    def test6(self):
        """row indx slice"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        rs = np.array([[1], [1], [1], [1]], dtype=np.int16)

        ds = daq['TPR_Tire_Surf_Type'][:4, findex(5840)]
        assert_array_equal(rs[:, 0], ds[:, 0])
        self.assertFalse(isinstance(ds, Element))
Example #16
    def test_readwrite_no_data(self):
        global test_file
        hdf5file = os.path.join('tmp', test_file[:-4]+'_5.hdf5')
        
        daq = Daq()
        daq.read(os.path.join('data', test_file))
        daq.write_hd5(hdf5file)

        daq2 = Daq()
        daq2.read_hd5(hdf5file)

        assert_Daqs_equal(self, daq, daq2)
Example #17
    def setUp(self):
        global test_file_large
        hdf5file = test_file_large[:-4] + '.hdf5'
        hdf5file = os.path.join('data', hdf5file)

        try:
            with open(hdf5file):
                pass
        except IOError:
            daq = Daq()
            daq.read(os.path.join('data', test_file_large))
            daq.write_hd5(hdf5file)
Example #18
    def test2(self):
        """frame > element.frames[-1]"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        rs = np.array([[1], [1], [1], [1], [0], [0], [0], [0], [0], [0]],
                      dtype=np.int16)

        ds = daq['TPR_Tire_Surf_Type'][:, findex(13000)]
        assert_array_equal(rs, ds)
        self.assertFalse(isinstance(ds, Element))
Example #19
    def test6(self):
        """row indx slice"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        rs = np.array([[1], [1], [1], [1]], dtype=np.int16)

        ds = daq['TPR_Tire_Surf_Type'][:4, findex(5840)]
        assert_array_equal(rs[:, 0], ds[:, 0])
        self.assertFalse(isinstance(ds, Element))
Example #20
    def test2(self):
        """frame > element.frames[-1]"""
        global test_file
        hdf5file = test_file[:-4] + '.hdf5'
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))

        rs = np.array([[1], [1], [1], [1], [0], [0], [0], [0], [0], [0]],
                      dtype=np.int16)

        ds = daq['TPR_Tire_Surf_Type'][:, findex(13000)]
        assert_array_equal(rs, ds)
        self.assertFalse(isinstance(ds, Element))
Example #21
    def test1(self):
        global test_file
        hdf5file = test_file[:-4]+'.hdf5'
    
        elems_indxs = [('CFS_Accelerator_Pedal_Position', 0),
                       ('SCC_Spline_Lane_Deviation', 1),
                       ('SCC_Spline_Lane_Deviation', 3),
                       ('VDS_Tire_Weight_On_Wheels', slice(0,4))]
                     
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))
#        print(daq['VDS_Tire_Weight_On_Wheels'].frames.shape)
        fig = daq.plot_ts(elems_indxs, fslice(6000, None))
        fig.savefig('./output/daq_plots_test.png')
Example #22
    def test_readwrite_hd5_file(self):
        global test_file_large
        hdf5file = test_file_large[:-4]+'.hdf5'
                
        daq = Daq()
        daq.read_hd5(os.path.join('data', hdf5file))
        do = daq.dynobjs.values()[0]

        do.write_hd5(filename='./tmp/dynobj_test.hdf5')

        do2 = DynObj()
        do2.read_hd5('./tmp/dynobj_test.hdf5')

        assert_dynobjs_equal(self, do, do2)
Example #23
    def test_load_partial(self):
        """test a file that did not close properly"""

        try:
            with open(os.path.join('data', partial)):
                pass
        except:
            return
        
        daq = Daq()
        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', partial))

        self.assertEqual(len(daq.keys()), 246)
        self.assertEqual(daq['VDS_Veh_Speed'].shape, (1, 68646))
Example #24
    def test_load_missing(self):
        """test a file with missing frames"""
        missing = 'Left_08_20130426164301.daq'

        try:
            with open(os.path.join('data', missing)):
                pass
        except:
            return
        
        daq = Daq()
        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', missing))

        daq.write_hd5(os.path.join('tmp', 'partial.hdf5'))
        
        daq2 = Daq()
        daq2.read_hd5(os.path.join('tmp', 'partial.hdf5'))

        assert_Daqs_equal(self, daq, daq2)        
Example #25
    def test_test_readwrite_hd5_reference(self):
        global test_file_large
        hdf5file = test_file_large[:-4] + '.hdf5'
        hdf5file = os.path.join('data', hdf5file)

        daq = Daq()
        daq.read_hd5(hdf5file)

        daq2 = Daq()
        daq2.read_hd5(hdf5file)
        for name in daq2.dynobjs.keys():
            assert_dynobjs_equal(self,
                                 daq.dynobjs[name],
                                 daq2.dynobjs[name])
Example #26
    def setUp(self):
        global test_file_large
        hdf5file = test_file_large[:-4] + '.hdf5'
        hdf5file = os.path.join('data', hdf5file)

        try:
            with open(hdf5file):
                pass
        except IOError:
            daq = Daq()
            daq.read(os.path.join('data', test_file_large))
            daq.write_hd5(hdf5file)
Example #27
    def test_load_with_elemlist(self):
        global test_file
        matfile = os.path.join('tmp', test_file[:-4]+'11.mat')
        matfile2 = os.path.join('tmp', test_file[:-4]+'22.mat')
        
        daq = Daq()
        daq.load_elemlist_fromfile('elemList2.txt')

        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', test_file),
                     process_dynobjs=False)

            
        daq.write_mat(matfile)
        del daq
        daqmat = sio.loadmat(matfile)

        old_convert_daq.convert_daq(os.path.join('data', test_file),
                                    'elemList2.txt',matfile2)
        old_daqmat = sio.loadmat(matfile2)
        self.__assert_daqmats_equal(old_daqmat, daqmat)
Example #28
    def test_load_with_elemlist(self):
        global test_file
        
        daq = Daq()
        daq.load_elemlist_fromfile('elemList2.txt')

        # something about nose makes catching the warnings not work
        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', test_file))

        old_daq = old_convert_daq.read_file(os.path.join('data', test_file), 
                                            elemfile='elemList2.txt')

        self.__assert_old_daq_equals_daq(old_daq, daq)
Example #29
def attach_metadata(hd5_file):
    """
    The Daq files have an 'etc' dict attribute intended for users to
    store analysis-relevant metadata. The etc dict can be exported to hdf5
    and reloaded from hdf5. We want to go through and build these dicts so
    that the information is at our fingertips when we need it.
    """
    
    global latin_square, wind_speeds

    t0 = time.time()

    tmp_file = hd5_file + '.tmp'

    # load hd5
    daq = Daq()
    daq.read_hd5(hd5_file)
    
    etc = {} # fill and pack in daq
    
    # find the real participant id (pid) from the file path
    etc['pid'] = int(hd5_file.split('\\')[0][4:])
    etc['scen_order'] = latin_square[(etc['pid']-1)%10]
    etc['wind_speeds'] = wind_speeds

    # now to find the epochs
    # epochs is a dictionary. The keys are the enumerated states and the
    # values are FrameSlice objects. The FrameSlice objects can be used
    # to slice the Elements in Daq instances.
    etc['epochs'] = find_epochs(daq['SCC_LogStreams'][0,:])

    daq.etc = etc # doing it this way ensures we know what is in there

    # write to temporary file.
    # once that completes, delete original hd5, and rename temporary file.
    # This protects you from yourself. If you get impatient and kill the
    # kernel there is way less risk of corrupting your hd5.
    daq.write_hd5(tmp_file)
    os.remove(hd5_file)
    os.rename(tmp_file, hd5_file)
    
    del daq

    return time.time() - t0
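
Because the etc dict round-trips through hdf5 (as the docstring above notes), later analysis scripts can simply reload the file and pick the metadata back up, which is exactly what the epoch-iterating snippets further down do. A minimal sketch, with the file name hypothetical:

from undaqTools import Daq

daq = Daq()
daq.read_hd5('participant.hdf5')  # hypothetical file prepared by attach_metadata()

# metadata packed by attach_metadata() is available again after reloading
print('pid %s, scen_order %s' % (daq.etc['pid'], daq.etc['scen_order']))
for epoch, fs in daq.etc['epochs'].items():
    print('%s: frames %s to %s' % (epoch, fs.start, fs.stop))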
Example #30
    def test_load(self):
        global test_file
        matfile = os.path.join('tmp', test_file[:-4]+'1.mat')
        matfile2 = os.path.join('tmp', test_file[:-4]+'2.mat')
        
        daq = Daq()
        daq.read(os.path.join('data', test_file),
                 process_dynobjs=False)
        daq.write_mat(matfile)
        del daq
        daqmat = sio.loadmat(matfile)

        old_convert_daq.convert_daq(os.path.join('data', test_file),
                                    '',matfile2)
        old_daqmat = sio.loadmat(matfile2)
        self.__assert_daqmats_equal(old_daqmat, daqmat)
Example #31
def main():

    parser = argparse.ArgumentParser(
        description='Get folder location to look for DAQ files')

    parser.add_argument('-dir', action="store", dest="dir_daq")
    parser.add_argument('-elemlist', action="store", dest="elemlist")

    results = parser.parse_args()

    current_directory = os.getcwd()
    target_directory = results.dir_daq  # folder (or top-level folder) that holds the DAQ files to convert

    with open(results.elemlist) as fname:
        elemlist = fname.read().split('\n')

    for root, dirs, files in os.walk(
            os.path.join(current_directory, target_directory)):
        for fname in files:
            if fname.endswith('.daq'):
                try:
                    daq = Daq()
                    print("Trying to read {}".format(fname))
                    daq.read_daq(os.path.join(root, fname),
                                 process_dynobjs=False,
                                 interpolate_missing_frames=True)
                    #daq.write_hd5(os.path.join(root,fname + '.hdf5'))

                    # Daq Size
                    data_length = daq['CFS_Accelerator_Pedal_Position'].shape[
                        1]

                    # Date Index
                    data_run_start = ''.join(
                        re.findall(
                            r'\d+',
                            np.array2string(
                                np.array(daq['RunInst']).flatten())))
                    data_run_start = dt.datetime.strptime(
                        data_run_start, '%Y%m%d%H%M%S')
                    data_run_start = pd.date_range(
                        pd.to_datetime(data_run_start),
                        periods=data_length,
                        freq='0.01667S')

                    data_list = []
                    col_list = []
                    for elem in elemlist:
                        data = np.array(daq[elem]).T
                        num_col = data.shape[1]
                        data_list.append(data)
                        for i in range(num_col):
                            col_list.append(elem + '_' + str(i))

                    data = np.concatenate(data_list, axis=1)

                    df = pd.DataFrame(data=data,
                                      index=data_run_start,
                                      columns=col_list)
                    df.index.rename('timestamp', inplace=True)
                    df.to_csv(os.path.join(root, fname + '.csv'))

                except AssertionError:
                    print("Could not read {}".format(fname))
Example #32
    def test_readwrite_with_elemlist_f0fend(self):
        global test_file
        hdf5file = os.path.join('tmp', test_file[:-4]+'_4.hdf5')
        
        daq = Daq()
        daq.load_elemlist_fromfile('elemList2.txt')
        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', test_file)) 
                              
        daq.write_hd5(hdf5file)

        daq2 = Daq()
        daq2.read_hd5(hdf5file, fend=8000)

        self.assertEqual(daq2.frame.frame[-1], 8000)
Example #33
    def test_readwrite_with_elemlist(self):
        global test_file
        hdf5file = os.path.join('tmp', test_file[:-4]+'_2.hdf5')
        
        daq = Daq()
        daq.load_elemlist_fromfile('elemList2.txt')
        with warnings.catch_warnings(record=True) as w:
            daq.read(os.path.join('data', test_file)) 
            
                              
        daq.write_hd5(hdf5file)

        daq2 = Daq()
        daq2.read_hd5(hdf5file)

        assert_Daqs_equal(self, daq, daq2)
Example #34
def mult_ado_by_pid_plot(pids, page):
    global hdf5_files, scenario_names, latin_square, get_pid

    # initialize plot
    fig = plt.figure(figsize=(11*1.75, 8.5*1.75))
    fig.subplots_adjust(left=.04, right=.96,
                        bottom=.05, top=.97,
                        wspace=.04, hspace=.04)

    # plot will have 2 axes. One is for Vehicle Speed, the other is
    # for relative distance of the DynObjs
    xticks = np.linspace(660,5280*1.25,10)
    xlim = [0, 5280*1.25]

    # speed in MPH
    yticks1 = np.linspace(35,75,5)
    ylim1 = [30,80]

    # relative distance in Feet
    yticks2 = np.linspace(-1500,1500,7)
    ylim2 = [-1750,1750]

    # loop through the hdf5_files that contain
    # data for the requested participants
    for hdf5_file in [hd for hd in hdf5_files if get_pid(hd) in pids]:
    
        print('analyzing "%s"'%hdf5_file)
        
        daq = Daq()
        daq.read_hd5(hdf5_file)
        pid = daq.etc['pid']

        # figure out row number
        rnum = pids.index(pid)

        # find the relevant dynamic objects to plot
        platoon = [do for do in daq.dynobjs.values() if 'Ado' in do.name]
        platoon = sorted(platoon, key=attrgetter('name'))

        # for each trial...
        for i in xrange(10):
            
            if not ( i*10+1 in daq.etc['epochs'] and \
                     i*10+3 in daq.etc['epochs']):

                # encountered partial trial
                continue
                
            scenario = daq.etc['scen_order'][i]
            scenario_name = scenario_names[scenario]
            
            print('  PID: %03i, Passing zone: %i (%s)'%(pid, i, scenario_name))
            
            # unpack the start frame of the 1-to-2 lane addition transition
            # and the stop frame of the 2-to-1 lane reduction transition
            f0 = daq.etc['epochs'][i*10+1].start
            fend = daq.etc['epochs'][i*10+3].stop

            # get axis handle
            ax1 = plt.subplot(8, 10, rnum*10 + scenario + 1)

            distance = daq['VDS_Veh_Dist'][0, fslice(f0, fend)].flatten()
            distance -= distance[0]
            
            speed = daq['VDS_Veh_Speed'][0, fslice(f0, fend)].flatten()
            
            ax1.plot(distance, speed, 'b')
            ax2 = ax1.twinx()
            
            # we need to figure out when the Ados enter the passing lane
            # and exit the passing lane. To do this we need to know where
            # the passing lane starts and ends
            pos0 = daq['VDS_Chassis_CG_Position'][:,findex(f0)]
            posend = daq['VDS_Chassis_CG_Position'][:,findex(fend)]

            # loop through and plot ados
            for j, do in enumerate(platoon):

                # each passing zone has its own set of Ados. This
                # sorts out which are actually defined (should be 
                # defined) for this trial.
                if i*10 <= int(do.name[-2:]) < (i+1)*10:
                    print(do.name)
                    
                    # figure out when the ado enters and exits the passing lane
                    d0 = np.sum((do.pos - pos0)**2., axis=0)**.5
                    dend = np.sum((do.pos - posend)**2., axis=0)**.5
                    
                    # indexes relative to do arrays
                    imin0 = d0.argmin() 
                    iminend = dend.argmin()

                    # now we can plot the Ados relative distance to the
                    # vehicle as a function of distance over the passing
                    # lane
                    if iminend-imin0 > 0:
                        distance = do.distance[0, imin0:iminend]
                        distance -= distance[0]
                        rel_distance = do.relative_distance[0, imin0:iminend]
                        ax2.plot(distance, rel_distance, color='g', alpha=0.4)
                    else:
                        print('    %s did not drive '
                              'through passing zone'%do.name)


            # make things pretty
            ax1.set_ylim(ylim1)
            ax1.set_yticks(yticks1)
            if scenario:
                ax1.set_yticklabels(['' for y in yticks1])
            else:
                ax1.set_yticklabels(yticks1, color='b')
            
            ax2.axhline(0, color='k', linestyle=':')
            ax2.set_ylim(ylim2)
            ax2.set_yticks(yticks2)
            if scenario != 9:
                ax2.set_yticklabels(['' for y in yticks2])
            else:
                ax2.set_yticklabels(yticks2, color='g')
                
            ax2.set_xlim(xlim)
            ax2.set_xticks(xticks)

            if rnum == 0:
                ax1.set_title(scenario_names[scenario])
                
            if rnum == len(pids)-1:
                ax1.set_xticklabels(['%i'%x for x in xticks], 
                                    rotation='vertical')
            else:
                ax1.set_xticklabels(['' for x in xticks])

            if not scenario:
                ax1.text(660,35,'Participant: %03i =>'%pids[rnum], 
                         size='small')

    img_name = 'do_passing_behavior__PAGE%i.png'%page
    fig.savefig(img_name, dpi=300)
    plt.close()
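
For reference, the epoch keys used above follow a trial*10 + section convention: the i*10+1 and i*10+3 lookups pull the 1-to-2 lane addition and 2-to-1 lane reduction sections of trial i, and the timeseries snippet further down recovers the pieces with epoch / 10 and epoch % 10. A one-line sketch of that decoding:

# key = trial*10 + section (see the i*10+1 and i*10+3 lookups above)
trial, section = divmod(23, 10)  # epoch 23 -> trial 2, section 3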
Example #35
from undaqTools import Daq

if __name__ == '__main__':
    
    for test_file in ['./data/data reduction_20130204125617.daq',
                      './data/Alaska_0_20130301142422.daq']:
        
        hdf5file = test_file[:-4]+'.hdf5'
        
        try:
            with open(hdf5file):
                pass
        except IOError:
            daq = Daq()
            daq.read(test_file)
            daq.write_hd5(hdf5file)
Example #36
    print('\nStep 1. Find speeds interpolated by distance...')
    t0 = time.time()
    hd5_files = tuple(glob.glob('*/*.hdf5'))

    # define a linearly spaced distance vector to interpolate to
    veh_dist_ip = np.arange(0, 6604, 8)

    # goal of step 1 is to fill this list of lists structure
    interp_spds_by_scenario = [[] for i in xrange(10)]

    for hd5_file in hd5_files:
        print("  interpolating '%s'..."%hd5_file)
        
        # load hd5
        daq = Daq()
        daq.read_hd5(hd5_file)

        # for each trial...
        for i in xrange(10):
            
            if not ( i*10+1 in daq.etc['epochs'] and \
                     i*10+3 in daq.etc['epochs']):

                # encountered partial trial
                continue
            
            scenario = daq.etc['scen_order'][i]
            
            # unpack the start frame of the 1-to-2 lane addition transition
            # and the stop frame of the 2-to-1 lane reduction transition
    print("Changing wd to '%s'" % data_dir)
    os.chdir(data_dir)

    # pyvttbl is in pypi
    # container to hold the collated results
    df = DataFrame()

    print('\nCollating timeseries measures...')
    t0 = time.time()
    hd5_files = tuple(glob.glob('*/*.hdf5'))

    for hd5_file in hd5_files:
        print("  analyzing '%s'..." % hd5_file)

        # load hd5
        daq = Daq()
        daq.read_hd5(hd5_file)

        # daq.etc was configured in Example02_*
        for (epoch, fslice) in daq.etc['epochs'].items():

            # figure out pid and independent variable conditions
            pid = daq.etc['pid']
            trial = epoch / 10
            scenario = daq.etc['scen_order'][trial]
            section = epoch % 10

            # pack pid and IV conditions into OrderedDict
            row = OrderedDict([('pid', pid), ('trial', trial),
                               ('scenario', scenario), ('section', section)])
Example #38
    try:
        rt_fname = sys.argv[1]
        daq_fname = sys.argv[2]
    except:
        print('Expecting routetable file name '
              'and route id as command line args')
        sys.exit()

    # specify broadcast rate in Hz
    try:
        broadcast_rate = int(sys.argv[3])
    except:
        broadcast_rate = 60

    # open datafile to broadcast
    daq = Daq()
    daq.read(daq_fname)
    numframes = len(daq.frame.frame)

    # read the routetable
    rt = RouteTable()
    rt.read(rt_fname)

    # prebuild the device arrays
    vecs = {}
    for route in rt.routes.values():
        for devid in route.devices:
            device = rt.devices[devid]

            if device.id in vecs:
                continue
Example #39
    def test_process(self):
        global test_file_large

        daq = Daq()
        daq.read(os.path.join('data', test_file_large))
        daq.write_hd5('./tmp/dynobj_process_test.hdf5')
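
The dynamic-object tests above depend on daq.dynobjs being populated; the read() calls in this collection only pass process_dynobjs explicitly to turn it off, which suggests it is on by default. A minimal sketch of walking the parsed objects, with the file name hypothetical:

import os
from undaqTools import Daq

daq = Daq()
daq.read(os.path.join('data', 'example.daq'))  # hypothetical .daq file

# dynamic objects are keyed by name (see Examples #22 and #25);
# do.name and do.pos are used the same way in the plotting code above
for name, do in daq.dynobjs.items():
    print('%s %s' % (name, do.pos.shape))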