Code example #1
import matplotlib.pyplot as plt

# cross_correlation_using_fft and compute_shift are project helpers; a sketch
# of a possible implementation is given after this example.


def plot_MIT_baseline(df_resample, fps, out_path):
    # fps is the video frame rate supplied by the caller (29.969664 for these videos)
    # re-zero the timestamps and convert from milliseconds to seconds
    df_resample = df_resample.reset_index()
    df_resample['time'] -= df_resample['time'][0]
    df_resample['time'] /= 1000
    df_resample = df_resample.set_index('time')
    fig, axes = plt.subplots(4, 1, figsize=(20, 10))
    axes[0].plot(df_resample['accx'],  label='accx')
    axes[0].plot(df_resample['accy'], label='accy')
    axes[0].plot(df_resample['accz'], label='accz')
    axes[0].legend(loc='upper right')
    axes[0].set_title('Accelerometer data')

    # plt.set_color_cycle(['red', 'black', 'blue', 'yellow', 'grey'])
    axes[1].plot(df_resample['diff_flowx'], 'b', label='diff_flowx')
    axes[2].plot(df_resample['diff_flowy'], 'orange', label='diff_flowy')
    #ax2.legend(loc='upper right')
    axes[1].set_title('diff flow x')
    axes[2].set_title('diff flow y')

    # the peak of the FFT cross-correlation between diff_flowx and accx gives the shift
    fftshift = cross_correlation_using_fft(df_resample['diff_flowx'].values, df_resample['accx'].values)
    shift = compute_shift(fftshift)
    axes[3].plot(fftshift)
    axes[3].set_title('diff flowx/accx delta={:.1f} ms'.format(shift * 1000/fps))
    
    plt.subplots_adjust(hspace=0.5)
    plt.savefig(out_path)
    plt.close()
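All five code examples rely on the project helpers cross_correlation_using_fft and compute_shift, which are not listed here. A minimal sketch of what they could look like, assuming the standard FFT-based circular cross-correlation recipe (the project's actual implementation may differ):

import numpy as np


def cross_correlation_using_fft(x, y):
    # circular cross-correlation of two equal-length 1-D signals via the FFT;
    # fftshift moves zero lag to the centre of the returned array
    f1 = np.fft.fft(x)
    f2 = np.fft.fft(np.flipud(y))
    cc = np.real(np.fft.ifft(f1 * f2))
    return np.fft.fftshift(cc)


def compute_shift(cc):
    # offset (in samples) of the correlation peak relative to zero lag
    zero_index = int(len(cc) / 2) - 1
    return zero_index - np.argmax(cc)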
Code example #2
import glob
import os
import pickle

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Project helpers (read_video, read_sensor, check_df, pca_flow, pca_sensor,
# resample, cross_correlation_using_fft, compute_shift, compute_error) are
# assumed to be defined elsewhere in the code base.


def baseline(mode):
    # load video data
    # session_list = open('../../CMU/session_list').readlines()
    video_dir = '../../CMU/video'
    opt_dir = '../../CMU/opt_flow'
    IMU_dir = '../../CMU/sensor/'
    data_dir = '../../CMU/data'
    FPS = 30
    baseline_dir = os.path.join(data_dir, 'baseline')
    # offsets = np.zeros(len(session_list))
    sensors = ['2794', '2795', '2796', '3261', '3337']
    video = '7150991'
    load_df = True
    # verbose = True
    draw = True
    sensor_dict = {'2794': 'Left Arm', '2795': 'Back', '2796': 'Left Leg', '3261': 'Right Leg', '3337': 'Right Arm'}
    #########################################################
    ##                   Baseline                           #
    #########################################################
    valid_session_file = '../../CMU/valid_sessions_win30_max60.pkl'
    offsets, session_list = pickle.load(open(valid_session_file, 'rb'))
    session_list = session_list['2794']
    if mode == 'x':
        mode_video = 'x'
        mode_imu = 'X'
    elif mode == 'PCA':
        mode_video = 'PCA'
        mode_imu = 'PCA'
    else:
        raise ValueError("mode must be 'x' or 'PCA'")
    shifts_baseline = {}
    skipped_sessions = {}
    valid_sessions = {}
    for sensor in sensors:
        shifts_baseline[sensor] = np.zeros(len(session_list))
        skipped_sessions[sensor] = []
        valid_sessions[sensor] = []
        os.makedirs('figures/baseline_{}_{}/{}'.format(mode_video, mode_imu, sensor), exist_ok=True)
    # load ground truth: the offset (in frames) is encoded in the optical-flow
    # file name and converted to milliseconds below
    offsets = np.zeros(len(session_list))
    for i, session in enumerate(session_list):
        session = session.strip()
        opt_file = glob.glob(os.path.join(opt_dir, session+'_Video', session+'_7150991-*.pkl'))
        if len(opt_file) > 0:
            opt_file = opt_file[0]
            offsets[i] = int(opt_file[opt_file.find('-')+1:-4])
    offsets = offsets * 1000/FPS
    for i, session in enumerate(session_list):
        session = session.strip()
        print('======== Processing session {} ========'.format(session))
        file = glob.glob(os.path.join(video_dir, session+'_Video', 'STime{}-time-*synch.txt'.format(video)))
        out_dir = os.path.join(baseline_dir, session)
        os.makedirs(out_dir, exist_ok=True)
        df_file = os.path.join(out_dir, 'df_video_{}.pkl'.format(video))
        print('Processing video {}'.format(video))
        if load_df and os.path.exists(df_file):
            print('    loading video df')
            df_video = pd.read_pickle(df_file)
        else:
            print('    creating video df')
            if file:
                df_video = read_video(file[0])
                check_df(df_video, delta=0.033332)
            else:
                print('skipping session {}, video sync doesn\'t exist'.format(session))
                for sensor in sensors:
                    shifts_baseline[sensor][i] = np.nan
                    skipped_sessions[sensor].append(session)
                continue
            opt_file = glob.glob(os.path.join(opt_dir, session+'_Video', session+'_7150991-*.pkl'))
            if opt_file:
                opt_file = opt_file[0]
                #offsets[i] = int(opt_file[opt_file.find('-')+1:-4])
            else:
                print('skipping session {}, optical flow doesn\'t exist'.format(session))
                for sensor in sensors:
                    shifts_baseline[sensor][i] = np.nan
                    skipped_sessions[sensor].append(session)
                continue
            motion = pickle.load(open(opt_file, 'rb'))
            if len(df_video) < len(motion):
                print('skipping session {}, fewer sync stamps than frames'.format(session))
                for sensor in sensors:
                    shifts_baseline[sensor][i] = np.nan
                    skipped_sessions[sensor].append(session)
                continue                
            df_video = df_video[:len(motion)]
            df_video['flowx'] = motion[:, 0]
            df_video['flowy'] = motion[:, 1]
            df_video['diff_flowx'] = df_video['flowx'].diff()
            df_video['diff_flowy'] = df_video['flowy'].diff()
            df_video = df_video[1:]
            df_video = pca_flow(df_video)
            # df_video.to_pickle(df_file)
        # load sensor data
        for sensor in sensors:
            print('Processing sensor {}'.format(sensor))
            df_file = os.path.join(out_dir, 'df_sensor_{}.pkl'.format(sensor))
            if os.path.exists(df_file):
                print('    loading sensor df')
                df_imu = pd.read_pickle(df_file)
            else:
                print('    creating sensor df')
                sensor_file = glob.glob(os.path.join(IMU_dir, session+'_3DMGX1', '{}_*-time*.txt'.format(sensor)))
                if sensor_file:
                    sensor_file = sensor_file[0]
                else:
                    print('skipping session {}, sensor time file doesn\'t exist'.format(session))
                    skipped_sessions[sensor].append(session)
                    shifts_baseline[sensor][i] = np.nan  
                    continue
                df_imu = read_sensor(sensor_file)
                df_imu = check_df(df_imu, delta=0.008)
                df_imu = pca_sensor(df_imu)
                # df_imu.to_pickle(df_file)
            st_time = max([df_imu.iloc[0]['SysTime'], df_video.iloc[0]['SysTime']])-0.01
            en_time = min([df_imu.iloc[len(df_imu) - 1]['SysTime'], df_video.iloc[len(df_video) - 1]['SysTime']])+0.01
            df_video_tmp = df_video[(df_video['SysTime'] >= st_time) & (df_video['SysTime'] < en_time)]
            df_imu = df_imu[(df_imu['SysTime'] >= st_time) & (df_imu['SysTime'] < en_time)]
            vid_time_stamps = df_video_tmp['SysTime'].values
            df_imu = resample(df_imu, 'SysTime', samplingRate=0,
                                          gapTolerance=200, fixedTimeColumn=vid_time_stamps)
            if df_imu is None:
                print('    no intersection between video and imu, skip session {}'.format(session))
                skipped_sessions[sensor].append(session)
                shifts_baseline[sensor][i] = np.nan 
                continue
            if len(df_video_tmp) != len(df_imu):
                print('    lengths of video and imu not equal, skip session {}'.format(session))
                print('len_vid = {}, len_sen = {}'.format(len(df_video_tmp), len(df_imu)))
                skipped_sessions[sensor].append(session)
                shifts_baseline[sensor][i] = np.nan 
                #df_imu.to_pickle(df_file)
                continue            

            fftshift = cross_correlation_using_fft(df_video_tmp['diff_flow{}'.format(mode_video)].values, \
                                                   df_imu['Accel_{}'.format(mode_imu)].values)
            shifts_baseline[sensor][i] = compute_shift(fftshift)
            if draw:
                path ='figures/baseline_{}_{}/{}/{}.jpg'.format(mode_video, mode_imu, sensor, session)
                plt.figure()
                plt.plot(fftshift[::-1])
                plt.title('Video / {} Sensor, gt = {:.3f}s, prediction = {:.3f}s'.format(sensor_dict[sensor], offsets[i]/1000, shifts_baseline[sensor][i] / FPS))
                plt.savefig(path)
                plt.close()  
            valid_sessions[sensor].append(session)
            #plt.figure()
            #plt.plot(fftshift)
            #plt.savefig('{}_{}_{}.png'.format(mode, session, sensor))
    for sensor in sensors:
        # exclude sessions 67-90 from the evaluation
        shifts_baseline[sensor][67:91] = np.nan
    error = compute_error(offsets, shifts_baseline)
    error.to_csv(os.path.join(baseline_dir, 'df_error_baseline_{}_{}.csv'.format(mode_video, mode_imu)))
    print(mode_video, mode_imu)
    print(error)
    #print(offsets, shifts_baseline)
    # pickle.dump([offsets, shifts_baseline], open(os.path.join(baseline_dir, 'results_baseline_{}_{}.pkl'.format(mode_video, mode_imu)), 'wb'))
    # pickle.dump(valid_sessions, open(os.path.join(baseline_dir, 'valid_sessions.pkl'), 'wb'))
    #
    # Analysis (assumes results_baseline_*.pkl exists from an earlier run,
    # since the pickle.dump above is commented out)
    _, shifts_baseline = pickle.load(open(os.path.join(baseline_dir, 'results_baseline_{}_{}.pkl'.format(mode_video, mode_imu)), 'rb'))
    summ_mat = offsets.reshape(-1, 1)
    for sensor in sensors:
        summ_mat = np.concatenate([summ_mat, shifts_baseline[sensor].reshape(-1, 1) - offsets.reshape(-1, 1)], axis=1)
    df_summ = pd.DataFrame(data=summ_mat, columns=['Ground truth', 'imu2794', 'imu2795', 'imu2796', 'imu3261', 'imu3337'], index=session_list)
    df_summ.to_csv(os.path.join(baseline_dir, 'df_summ_baseline_{}_{}.csv'.format(mode_video, mode_imu)))
    print(offsets)
Code example #3
from statistics import median, stdev


def drift_confidence(ts1, ts2):
    # the correlation peak gives the shift; its deviation from the median,
    # normalised by the std, gives a confidence score
    fftshift = cross_correlation_using_fft(ts1, ts2)
    dist = max(abs(fftshift-median(fftshift)))
    shift = compute_shift(fftshift)
    conf = dist/stdev(fftshift)
    return conf, shift
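A hypothetical call on synthetic data (the signal and the 5-sample offset are made up for illustration, and the helpers sketched above are assumed to be importable):

import numpy as np

rng = np.random.default_rng(0)
sig = rng.standard_normal(2000)
conf, shift = drift_confidence(sig[5:], sig[:-5])
print(conf, shift)  # a large confidence and a lag of about +/-5 samples are expected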
Code example #4
from statistics import median, stdev
from textwrap import wrap

import matplotlib.pyplot as plt
from scipy.stats import kurtosis


def drift_confidence(df_resample, out_path, fps, pca=1, save_fig=0):
    """Estimate the flow/accelerometer drift and a confidence score.

    Args:
        df_resample: resampled DataFrame with accx/accy/accz, acc_pca,
            diff_flowx/diff_flowy and diffflow_pca columns (plus
            diff_flowsquare/accsquare when save_fig is set).
        out_path: path of the diagnostic figure written when save_fig is set.
        fps: video frame rate used to convert the shift from frames to ms.
        pca: use the PCA-projected signals (1) or the raw x components (0).
        save_fig: if truthy, save a 2x4 panel of cross-correlation plots.

    Returns:
        (fx_ay_drift, fx_ay_conf): estimated drift in milliseconds and its
        confidence score.
    """
    if pca:
        flow_key = 'diffflow_pca'
        acc_key = 'acc_pca'
    else:
        flow_key = 'diff_flowx'
        acc_key = 'accx'

    fftshift = cross_correlation_using_fft(df_resample[flow_key].values,
                                           df_resample[acc_key].values)
    dist = max(abs(fftshift - median(fftshift)))
    shift = compute_shift(fftshift)
    fx_ay_drift = shift * 1000 / fps
    fx_ay_conf = dist / stdev(fftshift)

    if save_fig:
        fig, ax = plt.subplots(2, 4, figsize=(20, 10))
        plt.subplot(2, 4, 1)
        plt.plot(df_resample['accx'])
        plt.plot(df_resample['accy'])
        plt.plot(df_resample['accz'])
        plt.title('acc x, y, z')

        plt.subplot(2, 4, 5)
        plt.plot(df_resample['diff_flowx'])
        plt.plot(df_resample['diff_flowy'])
        plt.title('diff_flow x & y')

        plt.subplot(2, 4, 2)
        fftshift = cross_correlation_using_fft(
            df_resample['diff_flowx'].values, df_resample['diff_flowy'].values)
        dist = max(abs(fftshift - median(fftshift)))
        shift = compute_shift(fftshift)
        plt.plot(fftshift)
        plt.title("\n".join(wrap('fx fy {:.1f} ms, k{:.1f}, std{:.1f}, dm{:.1f}, ndm{:.1f}'.format(\
                shift * 1000/fps, kurtosis(fftshift), stdev(fftshift), dist, dist/stdev(fftshift)), 40)))

        plt.subplot(2, 4, 3)
        fftshift = cross_correlation_using_fft(
            df_resample['diff_flowsquare'].values,
            df_resample['accsquare'].values)
        dist = max(abs(fftshift - median(fftshift)))
        shift = compute_shift(fftshift)
        plt.plot(fftshift)
        plt.title("\n".join(wrap('fsq asq {:.1f} ms, k{:.1f}, std{:.1f}, dm{:.1f}, ndm{:.1f}'.format(\
                shift * 1000/fps, kurtosis(fftshift), stdev(fftshift), dist, dist/stdev(fftshift)), 40)))

        plt.subplot(2, 4, 4)
        fftshift = cross_correlation_using_fft(
            df_resample['diff_flowx'].values, df_resample['accx'].values)
        dist = max(abs(fftshift - median(fftshift)))
        shift = compute_shift(fftshift)
        plt.plot(fftshift)
        plt.title("\n".join(wrap('fx ax {:.1f} ms, k{:.1f}, std{:.1f}, dm{:.1f}, ndm{:.1f}'.format(\
                shift * 1000/fps, kurtosis(fftshift), stdev(fftshift), dist, dist/stdev(fftshift)), 40)))

        plt.subplot(2, 4, 6)
        fftshift = cross_correlation_using_fft(
            df_resample['diff_flowy'].values, df_resample['accz'].values)
        dist = max(abs(fftshift - median(fftshift)))
        shift = compute_shift(fftshift)
        plt.plot(fftshift)
        plt.title("\n".join(wrap('fy az {:.1f} ms, k{:.1f}, std{:.1f}, dm{:.1f}, ndm{:.1f}'.format(\
                shift * 1000/fps, kurtosis(fftshift), stdev(fftshift), dist, dist/stdev(fftshift)), 40)))

        plt.subplot(2, 4, 7)
        fftshift = cross_correlation_using_fft(
            df_resample['diff_flowx'].values, df_resample['accy'].values)
        dist = max(abs(fftshift - median(fftshift)))
        shift = compute_shift(fftshift)
        plt.plot(fftshift)
        plt.title("\n".join(wrap(r'fx ay $\bf{{{:.1f}}}$ ms, k{:.1f}, std{:.1f}, dm{:.1f}, ndm$\bf{{{:.1f}}}$'.format(\
                shift * 1000/fps, kurtosis(fftshift), stdev(fftshift), dist, dist/stdev(fftshift)), 40)))

        fig.tight_layout()
        fig.subplots_adjust(top=0.8)
        plt.savefig(out_path)
        plt.close()

    return fx_ay_drift, fx_ay_conf
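A hypothetical call, assuming df_resample already carries the resampled accx/accy/accz, acc_pca, diff_flowx/diff_flowy and diffflow_pca columns built in code example #5 below (with save_fig=1 it additionally needs diff_flowsquare and accsquare); the fps value and output path are illustrative:

drift_ms, conf = drift_confidence(df_resample, 'figures/example_panels.png',
                                  fps=29.969664, pca=1, save_fig=0)
print('estimated drift {:.1f} ms (confidence {:.1f})'.format(drift_ms, conf))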
Code example #5
import os
import pickle

import numpy as np
import pandas as pd
from sklearn.decomposition import PCA

# Project-level helpers and settings (settings, create_folder, resample,
# read_data_datefolder_hourfile, cross_correlation_using_fft, compute_shift,
# plot_MIT_baseline) and the module flags CUBIC and FRAME_INTERVAL are assumed
# to be importable from the surrounding code base.


def baseline_MIT_video_MD2K(window_size_sec, stride_sec, num_offsets, max_offset,
                            window_criterion, offset_sec=0, plot=0, use_PCA=0):
    # #subjects = ['202', '205', '211', '235', '236', '238', '240', '243']
    # dir_path = os.path.dirname(os.path.realpath(__file__))
    # # df_start_time = pd.read_csv(settings['STARTTIME_TEST_FILE'])    
    # # df_start_time = csv_read(os.path.join(dir_path, '../../Sense2StopSync/start_time.csv')).set_index('video_name')
    # df_start_time = csv_read(settings['STARTTIME_TEST_FILE']).set_index('video_name')
    # video_names = df_start_time.index.tolist()
    # subjects = list(set([vid.split(' ')[0] for vid in video_names]))    
    # fps = 29.969664
    # data_dir = '/media/yun/08790233DP/sync_data_2nd'
    # with open(data_dir+'/all_video' + title_suffix + '_info_dataset.pkl', 'rb') as handle:
    #     info_dataset = pickle.load(handle)

    # video_all = []
    # for info in info_dataset:
    #     video_all.append(info[0])
    # counter = Counter(video_all)
    # print(counter)

    # # select the qualified videos with more than 20 windows
    # qualify_videos = []
    # # set the parameter number of qualified windows 
    # qualified_window_num = 200
    # for vid in counter:
    #     if counter[vid] > qualified_window_num:
    #         qualify_videos.append(vid)
    # print(len(qualify_videos), 'videos have more than ', qualified_window_num, ' qualified windows.\n')
    # print(qualify_videos)
    df_subjects = []
    suffix = 'num_offsets{}'.format(num_offsets) if num_offsets else ''
    title_suffix = '_win{}_str{}_offset{}_rdoffset{}_maxoffset{}_wincrt{}'.\
        format(window_size_sec, stride_sec, offset_sec, num_offsets, max_offset, window_criterion)
    data_dir = './'

    df_start_time = pd.read_csv(settings['STARTTIME_TEST_FILE'])
    qualify_videos  = df_start_time["video_name"].tolist()
    print(qualify_videos)
    print(len(qualify_videos))
    subjects = list(set([vid[:3] for vid in qualify_videos]))
    print(subjects)
    print(len(subjects))
    fps = settings['FPS']

    dir_path = './'

    DEVICE = 'CHEST'
    SENSOR = 'ACCELEROMETER'
    SENSORS = ['ACCELEROMETER_X', 'ACCELEROMETER_Y', 'ACCELEROMETER_Z']
    sensor_col_header = ['accx', 'accy', 'accz']
    start_time_file = settings['STARTTIME_FILE']
    RAW_PATH = settings['raw_path']
    RESAMPLE_PATH = settings['reliability_resample_path']

    create_folder("result/baseline_pca")

    for sub in subjects:
        DEVICE = 'CHEST'
        SENSOR = 'ACCELEROMETER'
        SENSORS = ['ACCELEROMETER_X', 'ACCELEROMETER_Y', 'ACCELEROMETER_Z']
        sensor_col_header = ['accx', 'accy', 'accz']

        # start_time_file = os.path.join(dir_path, '../../Sense2StopSync/start_time.csv')
        start_time_file = settings["STARTTIME_TEST_FILE"]

        flow_dir = os.path.join(dir_path, '../../data/flow_pwc/sub{}'.format(sub))
        RAW_PATH = os.path.join(dir_path, '../../data/RAW/wild/')
        RESAMPLE_PATH = os.path.join(dir_path, '../../data/RESAMPLE200/wild/')

        flow_files = [f for f in os.listdir(flow_dir) if os.path.isfile(os.path.join(flow_dir, f))]
        flow_files = [f for f in flow_files if f.endswith('.pkl')]
        print('subject', sub, ': ', len(flow_files), 'total videos (including unqualified ones)')

        video_list = []
        offset_list = []

        for f in flow_files:
            vid_name = f[:-4]
            if vid_name not in qualify_videos:
                # print(vid_name, 'flow file not exist')
                continue
            vid_path = os.path.join(flow_dir, vid_name+'.pkl')
            out_path = os.path.join(data_dir, 'figures/figures_MIT_pca/corr_flow_averaged_acc_{}.png'.format(vid_name))
            
            
            # load start end time
            offset = 0
            df_start_time = pd.read_csv(start_time_file, index_col='video_name')
            if vid_name not in df_start_time.index:
                # print(vid_name, 'not exist in starttime csv')
                continue
            print(vid_name)
            start_time = df_start_time.loc[vid_name]['start_time']+offset
            
            # load optical flow data and assign unixtime to each frame
            motion = pickle.load(open(vid_path, 'rb'))
            # step = 1000.0/30.0
            step = 1000.0/fps
            length = motion.shape[0]
            timestamps_int = np.arange(start_time, start_time + length * step, step).astype(int)

            # # load sensor data
            # interval = [int(start_time), int(start_time) + length * step]
            # df = read_data_datefolder_hourfile(RESAMPLE_PATH, sub, DEVICE, SENSOR, *interval)
            # len_raw_sensor = len(df)

            # # load sensor reliability data
            # df_rel = read_data_datefolder_hourfile(RESAMPLE_PATH, sub, DEVICE, SENSOR + '_reliability', *interval)
            # # use the threshold ">=8Hz" to select 'good' seconds
            # rel_seconds = df_rel[df_rel['SampleCounts'] > 7].sort_values(by='Time')['Time'].values

            

            # np.arange with a float step can produce one extra element; trim
            # both arrays to a common length before building the DataFrame
            timestamps_int = timestamps_int[:min(len(timestamps_int), motion.shape[0])]
            motion = motion[:min(len(timestamps_int), motion.shape[0]), :]
            assert len(timestamps_int) == motion.shape[0]
            df_flow = pd.DataFrame({'time': timestamps_int, 'flowx': motion[:, 0], 'flowy': motion[:, 1]})
            df_flow['second'] = (df_flow['time']/1000).astype(int)

            # # extract the optical flow frames of the good seconds according to sensor data
            # df_flow_rel = pd.concat([df_flow[df_flow['second']==i] for i in rel_seconds]).reset_index()
            
            ## remove/keep video based on data quality
            # print(len(df_flow_rel)/len(df_flow))
            # if len(df_flow_rel)/len(df_flow) < 0.7:
                # continue

            fixedTimeCol = df_flow['time'].values

            if CUBIC:
                df_flow['time'] = pd.to_datetime(df_flow['time'], unit='ms')
            df_flow = df_flow[['flowx', 'flowy', 'time']].set_index('time')
            
            # extract the data of consecutive chunk and resample according to video frame timestamp
            df_list = []
            for S, col in zip(SENSORS, sensor_col_header):
                df = read_data_datefolder_hourfile(RAW_PATH, sub, DEVICE, S, fixedTimeCol[0], fixedTimeCol[-1])
                df = df[['time', col]]

                if CUBIC == 0:
                    df_sensor_resample = resample(df, 'time', samplingRate=0,
                                                  gapTolerance=200, fixedTimeColumn=fixedTimeCol).set_index('time')
                else:                        
                    df["time"] = pd.to_datetime(df["time"], unit="ms")
                    df = df.set_index("time")
                    df_sensor_resample = df.resample(FRAME_INTERVAL).mean()  
                    # FRAME_INTERVAL (0.03336707S) is the closest value to 1/29.969664 s that pandas accepts
                    df_sensor_resample = df_sensor_resample.interpolate(method="spline", order=3) # cubic spline interpolation

                df_list.append(df_sensor_resample)
            
            if CUBIC == 0:
                df_list.append(df_flow)
                df_resample = pd.concat(df_list, axis=1)
            else:
                df_sensors = pd.concat(df_list, axis=1)
                df_list = [df_sensors, df_flow]
                df_resample = pd.merge_asof(df_list[1], df_list[0], on='time',
                                            tolerance=pd.Timedelta("30ms"),
                                            direction='nearest').set_index('time')

            
            # two options for filling NA values:
            # 1. fill with 0
            # df_resample = df_resample.fillna(0)
            # 2. forward-fill
            df_resample = df_resample.fillna(method='ffill')
            df_resample = df_resample.dropna(how='any')

            df_resample['diff_flowx'] = df_resample['flowx'].diff()
            df_resample['diff_flowy'] = df_resample['flowy'].diff()
            df_resample = df_resample.dropna(how='any')
            # project the mean-centred 3-axis accelerometer and 2-D flow
            # difference onto their first principal components
            pca_sensor = PCA(n_components=1)
            df_resample[['accx', 'accy', 'accz']] -= df_resample[['accx', 'accy', 'accz']].mean()
            df_resample['acc_pca'] = pca_sensor.fit_transform(df_resample[['accx', 'accy', 'accz']].to_numpy())
            diffflow_mat = df_resample[['diff_flowx', 'diff_flowy']].to_numpy()
            diffflow_mat -= np.mean(diffflow_mat, axis=0)
            pca_diffflow = PCA(n_components=1)
            df_resample['diffflow_pca'] = pca_diffflow.fit_transform(diffflow_mat)

            if use_PCA == 1:
                fftshift = cross_correlation_using_fft(df_resample['diffflow_pca'].values, df_resample['acc_pca'].values)
            else:
                fftshift = cross_correlation_using_fft(df_resample['diff_flowx'].values, df_resample['accx'].values)
            shift = compute_shift(fftshift)
            shift_ms = shift * 1000 / fps
            # print('diff_flowx accx delta={:.1f} ms'.format(shift_ms))
            video_list.append(vid_name)
            offset_list.append(shift_ms)
            print(vid_name, shift_ms)

            if plot:
                plot_MIT_baseline(df_resample, fps, out_path)

        df_subj = pd.DataFrame({'video': video_list, 'offset': offset_list})
        # print((df_subj))
        df_subjects.append(df_subj)
    result_df = pd.concat(df_subjects)
    result_df = result_df.reset_index()
    ave_error = np.mean(np.abs(result_df['offset'].values))
    PV300 = np.sum(np.abs(result_df['offset'].values) < 300) / len(result_df) * 100
    PV700 = np.sum(np.abs(result_df['offset'].values) < 700) / len(result_df) * 100
    result_df.to_csv(os.path.join(data_dir, 'result/baseline_pca/baseline_MIT_entirevideo_MD2K_offset_pad' + title_suffix + '.csv'), index=False)
    print("ave_error", "PV300", "PV700")
    print(ave_error, PV300, PV700)
    print("# videos: ", len(result_df))
    return result_df
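A hypothetical invocation of the end-to-end baseline; all parameter values below are illustrative, not the settings used in the original experiments:

if __name__ == '__main__':
    result_df = baseline_MIT_video_MD2K(window_size_sec=10, stride_sec=1,
                                        num_offsets=0, max_offset=60,
                                        window_criterion=0.8,
                                        offset_sec=0, plot=0, use_PCA=1)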