def test_one_epoch(sess, ops_L, templates, shuffled_poses, saver, model_path):
    # Arguments:
    # sess:            TensorFlow session to handle tensors.
    # ops_L:           Dictionary of tensors for Network_L.
    # templates:       Training point cloud data.
    # shuffled_poses:  Training pose data.
    # saver:           To restore the weights of the trained network.
    # model_path:      Path of the log directory containing the checkpoint.

    saver.restore(sess, model_path)  # Restore the weights of trained network.

    is_training = False
    display_ptClouds = False
    display_poses = False
    display_poses_in_itr = False
    display_ptClouds_in_itr = False
    swap_case = False

    templates = helper.process_templates('templates')
    template_data = np.zeros((BATCH_SIZE, MAX_NUM_POINT,
                              3))  # Extract templates for the batch.
    for i in range(BATCH_SIZE):
        template_data[i, :, :] = np.copy(templates[1, :, :])
    batch_euler_poses = shuffled_poses[0].reshape(
        (1, 6))  # Extract poses for the batch.

    # Self defined test case.
    batch_euler_poses[0] = [
        0.5, 0.0, 0.2, 50 * (np.pi / 180), 0 * (np.pi / 180),
        10 * (np.pi / 180)
    ]
    source_data = helper.apply_transformation(
        template_data, batch_euler_poses
    )  # Apply the poses on the templates to get source data.
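    # Note on the pose format used throughout this script: each pose is
    # [t_x, t_y, t_z, rot_x, rot_y, rot_z], translations in the template's units
    # and rotations in radians. A minimal sketch of what
    # helper.apply_transformation is assumed to do for a single cloud
    # (illustrative only, not the helper's actual implementation):
    #   R = t3d.euler2mat(rot_z, rot_y, rot_x, 'szyx')
    #   source = template.dot(R.T) + np.array([t_x, t_y, t_z])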

    # Only choose a limited number of points from the source and template data.
    template_data = template_data[:, 0:NUM_POINT, :]
    source_data = source_data[:, 0:NUM_POINT, :]

    if swap_case:
        source_data, template_data = template_data, source_data  # Swap the template and source.
        transformation_template2source = helper.transformation(
            batch_euler_poses)
        transformation_source2template = np.linalg.inv(
            transformation_template2source[0])
        euler_z, euler_y, euler_x = t3d.mat2euler(
            transformation_source2template[0:3, 0:3], 'szyx')
        trans_x = transformation_source2template[0, 3]
        trans_y = transformation_source2template[1, 3]
        trans_z = transformation_source2template[2, 3]
        pose_source2template = [
            trans_x, trans_y, trans_z, euler_x * (180 / np.pi),
            euler_y * (180 / np.pi), euler_z * (180 / np.pi)
        ]
        batch_euler_poses[0] = pose_source2template
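        # Sanity note: for a rigid transform T = [R | t; 0 0 0 1], the inverse is
        # [R.T | -R.T @ t; 0 0 0 1], which np.linalg.inv computes here, and
        # t3d.mat2euler(..., 'szyx') returns the angles in (z, y, x) order, hence
        # the unpacking above. A quick illustrative check (commented out):
        #   assert np.allclose(
        #       transformation_template2source[0] @ transformation_source2template,
        #       np.identity(4), atol=1e-6)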

    TEMPLATE_DATA = np.copy(
        template_data)  # Store the initial template to visualize results.
    SOURCE_DATA = np.copy(
        source_data)  # Store the initial source to visualize results.

    # To visualize the source and template point clouds:
    if display_ptClouds:
        helper.display_clouds_data(source_data[0])
        helper.display_clouds_data(template_data[0])

    # Subtract the Centroids from the Point Clouds.
    if centroid_subtraction_switch:
        source_data, template_data, centroid_translation_pose = helper.centroid_subtraction(
            source_data, template_data)
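        # helper.centroid_subtraction is assumed to return both clouds re-centred
        # at their centroids together with the translation pose that undoes the
        # centring; that pose is applied back to the final transformation below.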

    TRANSFORMATIONS = np.identity(
        4)  # Initialize identity transformation matrix.
    TRANSFORMATIONS = np.matlib.repmat(TRANSFORMATIONS, BATCH_SIZE, 1).reshape(
        BATCH_SIZE, 4,
        4)  # Initialize identity matrices of size equal to BATCH_SIZE.
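    # Equivalent initialization that avoids the deprecated numpy.matlib module,
    # should that dependency ever be dropped (illustrative alternative only):
    #   TRANSFORMATIONS = np.tile(np.identity(4), (BATCH_SIZE, 1, 1))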

    # Feed the placeholders of Network_L with the source and template data.
    feed_dict = {
        ops_L['source_pointclouds_pl']: source_data,
        ops_L['template_pointclouds_pl']: template_data,
        ops_L['is_training_pl']: is_training
    }

    # Ask the network to predict the transformation and calculate the loss using
    # the distance between the actual points.
    import time
    start = time.time()
    step, predicted_transformation, loss_val = sess.run(
        [ops_L['step'], ops_L['predicted_transformation'], ops_L['loss']],
        feed_dict=feed_dict)  # ops_L is assumed to expose a 'loss' tensor.
    end = time.time()
    print('Inference time (s): {}'.format(end - start))

    # Convert the predicted transformation into a matrix, apply it to the template data and compose it with the accumulated transformation matrix.
    TRANSFORMATIONS, template_data = helper.transformation_quat2mat(
        predicted_transformation, TRANSFORMATIONS, template_data)
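    # helper.transformation_quat2mat is assumed to convert the predicted pose
    # (translation + quaternion) into a 4x4 matrix, compose it with the
    # accumulated TRANSFORMATIONS and return the template transformed by the new
    # estimate, so repeated calls would chain incremental alignments.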

    if centroid_subtraction_switch:  # If the centroid was subtracted, apply the centroid translation back to the point clouds.
        TRANSFORMATIONS, template_data = helper.transformation_quat2mat(
            centroid_translation_pose, TRANSFORMATIONS, template_data)

    final_pose = helper.find_final_pose(TRANSFORMATIONS)
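    # helper.find_final_pose is assumed to decompose each accumulated 4x4 matrix
    # back into a pose [t_x, t_y, t_z, rot_x, rot_y, rot_z] (rotations in
    # radians), giving final_pose a shape of (BATCH_SIZE, 6).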

    if not swap_case:
        title = "Actual T (Red->Green): "
        for i in range(len(batch_euler_poses[0])):
            if i > 2:
                title += str(round(batch_euler_poses[0][i] * (180 / np.pi), 2))
            else:
                title += str(batch_euler_poses[0][i])
            title += ', '
        title += "\nPredicted T (Red->Blue): "
        for i in range(len(final_pose[0])):
            if i > 2:
                title += str(round(final_pose[0, i] * (180 / np.pi), 3))
            else:
                title += str(round(final_pose[0, i], 3))
            title += ', '
    else:
        title = "Predicted T (Red->Blue): "
        for i in range(len(final_pose[0])):
            if i > 2:
                title += str(round(final_pose[0, i] * (180 / np.pi), 3))
            else:
                title += str(round(final_pose[0, i], 3))
            title += ', '

    # Display the ground truth pose and predicted pose for first Point Cloud in batch
    if display_poses:
        print('Ground Truth Position: {}'.format(
            batch_euler_poses[0, 0:3].tolist()))
        print('Predicted Position: {}'.format(final_pose[0, 0:3].tolist()))
        print('Ground Truth Orientation: {}'.format(
            (batch_euler_poses[0, 3:6] * (180 / np.pi)).tolist()))
        print('Predicted Orientation: {}'.format(
            (final_pose[0, 3:6] * (180 / np.pi)).tolist()))

    helper.display_three_clouds(TEMPLATE_DATA[0], SOURCE_DATA[0],
                                template_data[0], title)
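    # helper.display_three_clouds is assumed to plot the original template, the
    # original source and the registered template in one figure, with colours
    # matching the Red/Green/Blue references in the title.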

    print("Loss: {}".format(loss_val))


if __name__ == '__main__':
    # a = np.array([[0,0,0,0,0,0],[0,0,0,90,0,0]])
    # print a.shape
    # a = poses_euler2quat(a)
    # print(a[1,3]*a[1,3]+a[1,4]*a[1,4]+a[1,5]*a[1,5]+a[1,6]*a[1,6])
    # print(a[0,3]*a[0,3]+a[0,4]*a[0,4]+a[0,5]*a[0,5]+a[0,6]*a[0,6])
    # print a.shape
    # display_clouds('airplane_templates.csv',0)

    templates = helper.process_templates('multi_model_templates')
    # templates = helper.process_templates('templates')
    # airplane = templates[0,:,:]
    idx = 199
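    # helper.process_templates is assumed to return an array of shape
    # (num_templates, points_per_template, 3); idx simply picks one model from
    # the multi-model template set for a quick visual check.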
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # start = idx*2048
    # end = (idx+1)*2048
    ax.scatter(templates[idx, :, 0], templates[idx, :, 1],
               templates[idx, :, 2])
    plt.show()
    print(templates.shape)