Example #1
0
def main(args):
    """Entry point: run the action named in ``args.action``.

    'check' and 'export-all' run without loading the dataset; all other
    actions ('report', 'export', 'plot') get the loaded dataset passed in.
    Exits with an error message for a missing dataset path or an unknown
    action.
    """
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    args.disable_cache = True  # we do not want to load stale data here!
    path = args.dataset
    if not os.path.exists(path):
        exit('dataset at path "%s" does not exist' % path)

    # check and export-all do not need the loaded dataset
    if args.action == 'check':
        check(args)
        return
    elif args.action == 'export-all':
        export_all(args)
        return

    print('loading data set "%s" ...' % path)
    dataset = data.load_dataset_from_args(args)

    # Dispatch table instead of an if/elif chain; explicit exit instead of
    # `assert`, which is stripped under `python -O` and would otherwise turn
    # an unknown action into a bare AssertionError.
    actions = {
        'report': report,
        'export': export,
        'plot': plot,
    }
    action_func = actions.get(args.action)
    if action_func is None:
        exit('unknown action "%s"' % args.action)
    action_func(dataset, args)
Example #2
0
def main(args):
    """Entry point: run the action named in ``args.action``.

    'check' and 'export-all' run without loading the dataset; all other
    actions ('report', 'export', 'plot') get the loaded dataset passed in.
    Exits with an error message for a missing dataset path or an unknown
    action.
    """
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    args.disable_cache = True  # we do not want to load stale data here!
    path = args.dataset
    if not os.path.exists(path):
        exit('dataset at path "%s" does not exist' % path)

    # check and export-all do not need the loaded dataset
    if args.action == 'check':
        check(args)
        return
    elif args.action == 'export-all':
        export_all(args)
        return

    print('loading data set "%s" ...' % path)
    dataset = data.load_dataset_from_args(args)

    # Dispatch table instead of an if/elif chain; explicit exit instead of
    # `assert`, which is stripped under `python -O` and would otherwise turn
    # an unknown action into a bare AssertionError.
    actions = {
        'report': report,
        'export': export,
        'plot': plot,
    }
    action_func = actions.get(args.action)
    if action_func is None:
        exit('unknown action "%s"' % args.action)
    action_func(dataset, args)
Example #3
0
def merge(args):
    """Merge two datasets into one and return the merged dataset.

    The merge mutates the first dataset in place; previously the result was
    computed and then silently discarded, making the function a no-op for
    callers. Returning it is backward-compatible (callers ignoring the
    return value are unaffected).
    """
    # NOTE(review): other call sites pass the whole `args` namespace to
    # data.load_dataset_from_args, while this passes the bare path strings —
    # confirm which signature the loader actually expects.
    dataset1 = data.load_dataset_from_args(args.dataset)
    dataset2 = data.load_dataset_from_args(args.other_dataset)
    dataset1.merge_with_dataset(dataset2)
    return dataset1
Example #4
0
def export_all(args):
    """Load the MMM and Vicon datasets (raw and normalized), merge all four
    into one dataset and optionally pickle the result to ``args.output``.

    The Vicon data has no built-in normalization, so it is normalized
    manually using the root position/rotation features of the *unnormalized*
    MMM dataset. Note: mutates ``args`` (motion_type,
    disable_normalization, disable_smoothing) as it goes.
    """
    # Load MMM dataset WITHOUT normalization
    args.motion_type = 'mmm-nlopt'
    args.disable_normalization = True
    args.disable_smoothing = True
    print('Loading MMM data without normalization ...')
    mmm = data.load_dataset_from_args(args)
    print('done, %d motions and %d features loaded' %
          (mmm.n_samples, mmm.n_features))
    print('')

    # Load MMM dataset WITH normalization
    args.motion_type = 'mmm-nlopt'
    args.disable_normalization = False
    args.disable_smoothing = True
    print('Loading MMM data with normalization ...')
    normalized_mmm = data.load_dataset_from_args(args)
    # Prefix feature names so they do not collide with the raw MMM features
    # after merging.
    normalized_mmm.feature_names = [
        'normalized_' + name for name in normalized_mmm.feature_names
    ]
    print('done, %d motions and %d features loaded' %
          (normalized_mmm.n_samples, normalized_mmm.n_features))
    print('')

    # Load Vicon dataset
    args.motion_type = 'vicon'
    args.disable_normalization = True
    args.disable_smoothing = True
    print('Loading vicon data without normalization ...')
    vicon = data.load_dataset_from_args(args)
    print('done, %d motions and %d features loaded' %
          (vicon.n_samples, vicon.n_features))
    print('')

    # Manually perform normalization on Vicon using the root_rot and
    # root_pos information from the unnormalized MMM dataset.
    print('Performing normalization on vicon dataset ...')
    normalized_vicon = vicon.copy()
    root_pos_indexes = mmm.indexes_for_feature('root_pos')
    root_rot_indexes = mmm.indexes_for_feature('root_rot')
    # Feature layouts are per-dataset, not per-sample: hoist the index
    # lookups out of the sample loop (they were recomputed every iteration).
    marker_pos_indexes = normalized_vicon.indexes_for_feature('marker_pos')
    marker_vel_indexes = normalized_vicon.indexes_for_feature('marker_vel')
    marker_acc_indexes = normalized_vicon.indexes_for_feature('marker_acc')
    assert len(marker_pos_indexes) % 3 == 0
    # Floor division keeps n_markers an int; plain `/` would produce a float
    # under Python 3 or `from __future__ import division` and break xrange.
    n_markers = len(marker_pos_indexes) // 3
    for sample_idx in xrange(normalized_vicon.n_samples):
        X_curr = normalized_vicon.X[sample_idx]
        n_frames = X_curr.shape[0]

        root_pos0 = mmm.X[sample_idx][
            0, root_pos_indexes]  # do not use the normalized dataset here!
        root_rot = mmm.X[
            sample_idx][:,
                        root_rot_indexes]  # do not use the normalized dataset here!
        assert root_rot.shape == (n_frames, 3)

        # Normalize marker positions: express each marker in the frame of
        # the first root pose (position plus yaw only).
        root_pose0_inv = np.linalg.inv(
            pose_matrix(root_pos0, (0., 0., root_rot[0][2])))
        for marker_idx in xrange(n_markers):
            start_idx = marker_pos_indexes[0] + marker_idx * 3
            end_idx = start_idx + 3
            marker_pos = X_curr[:, start_idx:end_idx]
            n_rows = marker_pos.shape[0]
            # Homogeneous coordinates so the 4x4 pose matrix applies.
            marker_pos_plus_one = np.hstack((marker_pos, np.ones(
                (n_rows, 1))))
            assert marker_pos_plus_one.shape == (n_rows, 4)
            normalized_marker_pos = np.dot(root_pose0_inv,
                                           marker_pos_plus_one.T).T[:, 0:3]
            assert normalized_marker_pos.shape == marker_pos.shape
            X_curr[:, start_idx:end_idx] = normalized_marker_pos

        # Normalize velocities and accelerations: rotate (no translation)
        # by the inverse per-frame root rotation.
        for idx in xrange(n_frames):
            root_rot_inv = np.linalg.inv(
                rotation_matrix(root_rot[idx][0], root_rot[idx][1],
                                root_rot[idx][2]))
            for marker_idx in xrange(n_markers):
                vel_start_idx = marker_vel_indexes[0] + marker_idx * 3
                vel_end_idx = vel_start_idx + 3
                vel = X_curr[idx, vel_start_idx:vel_end_idx]
                X_curr[idx,
                       vel_start_idx:vel_end_idx] = np.dot(root_rot_inv, vel)

                acc_start_idx = marker_acc_indexes[0] + marker_idx * 3
                acc_end_idx = acc_start_idx + 3
                acc = X_curr[idx, acc_start_idx:acc_end_idx]
                X_curr[idx,
                       acc_start_idx:acc_end_idx] = np.dot(root_rot_inv, acc)
        # No need to normalize marker_vel_norm and marker_acc_norm
    normalized_vicon.feature_names = [
        'normalized_' + name for name in normalized_vicon.feature_names
    ]
    print('done, %d motions and %d features loaded' %
          (normalized_vicon.n_samples, normalized_vicon.n_features))
    print('')

    # Merge all datasets into one
    print('merging datasets ...')
    final_dataset = mmm.copy()
    final_dataset.merge_with_dataset(normalized_mmm)
    final_dataset.merge_with_dataset(vicon)
    final_dataset.merge_with_dataset(normalized_vicon)
    # Sanity-check that merging concatenated features in the expected order.
    assert final_dataset.feature_names == mmm.feature_names + normalized_mmm.feature_names + vicon.feature_names + normalized_vicon.feature_names
    assert final_dataset.n_features == mmm.n_features + normalized_mmm.n_features + vicon.n_features + normalized_vicon.n_features
    print('done, %d motions and %d features' %
          (final_dataset.n_samples, final_dataset.n_features))
    print('')

    if args.output is not None:
        print('saving dataset ...')
        with open(args.output, 'wb') as f:
            pickle.dump(final_dataset, f)
        print('done')
Example #5
0
def merge(args):
    """Merge two datasets into one and return the merged dataset.

    The merge mutates the first dataset in place; previously the result was
    computed and then silently discarded, making the function a no-op for
    callers. Returning it is backward-compatible (callers ignoring the
    return value are unaffected).
    """
    # NOTE(review): other call sites pass the whole `args` namespace to
    # data.load_dataset_from_args, while this passes the bare path strings —
    # confirm which signature the loader actually expects.
    dataset1 = data.load_dataset_from_args(args.dataset)
    dataset2 = data.load_dataset_from_args(args.other_dataset)
    dataset1.merge_with_dataset(dataset2)
    return dataset1
Example #6
0
def export_all(args):
    """Load the MMM and Vicon datasets (raw and normalized), merge all four
    into one dataset and optionally pickle the result to ``args.output``.

    The Vicon data has no built-in normalization, so it is normalized
    manually using the root position/rotation features of the *unnormalized*
    MMM dataset. Note: mutates ``args`` (motion_type,
    disable_normalization, disable_smoothing) as it goes.
    """
    # Load MMM dataset WITHOUT normalization
    args.motion_type = 'mmm-nlopt'
    args.disable_normalization = True
    args.disable_smoothing = True
    print('Loading MMM data without normalization ...')
    mmm = data.load_dataset_from_args(args)
    print('done, %d motions and %d features loaded' % (mmm.n_samples, mmm.n_features))
    print('')

    # Load MMM dataset WITH normalization
    args.motion_type = 'mmm-nlopt'
    args.disable_normalization = False
    args.disable_smoothing = True
    print('Loading MMM data with normalization ...')
    normalized_mmm = data.load_dataset_from_args(args)
    # Prefix feature names so they do not collide with the raw MMM features after merging.
    normalized_mmm.feature_names = ['normalized_' + name for name in normalized_mmm.feature_names]
    print('done, %d motions and %d features loaded' % (normalized_mmm.n_samples, normalized_mmm.n_features))
    print('')

    # Load Vicon dataset
    args.motion_type = 'vicon'
    args.disable_normalization = True
    args.disable_smoothing = True
    print('Loading vicon data without normalization ...')
    vicon = data.load_dataset_from_args(args)
    print('done, %d motions and %d features loaded' % (vicon.n_samples, vicon.n_features))
    print('')

    # Manually perform normalization on Vicon using the root_rot and root_pos
    # information from the unnormalized MMM dataset.
    print('Performing normalization on vicon dataset ...')
    normalized_vicon = vicon.copy()
    root_pos_indexes = mmm.indexes_for_feature('root_pos')
    root_rot_indexes = mmm.indexes_for_feature('root_rot')
    # Feature layouts are per-dataset, not per-sample: hoist the index
    # lookups out of the sample loop (they were recomputed every iteration).
    marker_pos_indexes = normalized_vicon.indexes_for_feature('marker_pos')
    marker_vel_indexes = normalized_vicon.indexes_for_feature('marker_vel')
    marker_acc_indexes = normalized_vicon.indexes_for_feature('marker_acc')
    assert len(marker_pos_indexes) % 3 == 0
    # Floor division keeps n_markers an int; plain `/` would produce a float
    # under Python 3 or `from __future__ import division` and break xrange.
    n_markers = len(marker_pos_indexes) // 3
    for sample_idx in xrange(normalized_vicon.n_samples):
        X_curr = normalized_vicon.X[sample_idx]
        n_frames = X_curr.shape[0]

        root_pos0 = mmm.X[sample_idx][0, root_pos_indexes]  # do not use the normalized dataset here!
        root_rot = mmm.X[sample_idx][:, root_rot_indexes]   # do not use the normalized dataset here!
        assert root_rot.shape == (n_frames, 3)

        # Normalize marker positions: express each marker in the frame of the
        # first root pose (position plus yaw only).
        root_pose0_inv = np.linalg.inv(pose_matrix(root_pos0, (0., 0., root_rot[0][2])))
        for marker_idx in xrange(n_markers):
            start_idx = marker_pos_indexes[0] + marker_idx * 3
            end_idx = start_idx + 3
            marker_pos = X_curr[:, start_idx:end_idx]
            n_rows = marker_pos.shape[0]
            # Homogeneous coordinates so the 4x4 pose matrix applies.
            marker_pos_plus_one = np.hstack((marker_pos, np.ones((n_rows, 1))))
            assert marker_pos_plus_one.shape == (n_rows, 4)
            normalized_marker_pos = np.dot(root_pose0_inv, marker_pos_plus_one.T).T[:, 0:3]
            assert normalized_marker_pos.shape == marker_pos.shape
            X_curr[:, start_idx:end_idx] = normalized_marker_pos

        # Normalize velocities and accelerations: rotate (no translation) by
        # the inverse per-frame root rotation.
        for idx in xrange(n_frames):
            root_rot_inv = np.linalg.inv(rotation_matrix(root_rot[idx][0], root_rot[idx][1], root_rot[idx][2]))
            for marker_idx in xrange(n_markers):
                vel_start_idx = marker_vel_indexes[0] + marker_idx * 3
                vel_end_idx = vel_start_idx + 3
                vel = X_curr[idx, vel_start_idx:vel_end_idx]
                X_curr[idx, vel_start_idx:vel_end_idx] = np.dot(root_rot_inv, vel)

                acc_start_idx = marker_acc_indexes[0] + marker_idx * 3
                acc_end_idx = acc_start_idx + 3
                acc = X_curr[idx, acc_start_idx:acc_end_idx]
                X_curr[idx, acc_start_idx:acc_end_idx] = np.dot(root_rot_inv, acc)
        # No need to normalize marker_vel_norm and marker_acc_norm
    normalized_vicon.feature_names = ['normalized_' + name for name in normalized_vicon.feature_names]
    print('done, %d motions and %d features loaded' % (normalized_vicon.n_samples, normalized_vicon.n_features))
    print('')

    # Merge all datasets into one
    print('merging datasets ...')
    final_dataset = mmm.copy()
    final_dataset.merge_with_dataset(normalized_mmm)
    final_dataset.merge_with_dataset(vicon)
    final_dataset.merge_with_dataset(normalized_vicon)
    # Sanity-check that merging concatenated features in the expected order.
    assert final_dataset.feature_names == mmm.feature_names + normalized_mmm.feature_names + vicon.feature_names + normalized_vicon.feature_names
    assert final_dataset.n_features == mmm.n_features + normalized_mmm.n_features + vicon.n_features + normalized_vicon.n_features
    print('done, %d motions and %d features' % (final_dataset.n_samples, final_dataset.n_features))
    print('')

    if args.output is not None:
        print('saving dataset ...')
        with open(args.output, 'wb') as f:
            pickle.dump(final_dataset, f)
        print('done')