Code example #1
    def test_turn_location_position(self):
        # Human assisted test
        # Ensure that the car returns to the same location if we drive around in circles.

        import visualizer
        pose_estimator = PoseEstimator((0, 0, 0), 0, 0, 0, 0)
        d = 4
        steering_angle_radians = np.pi / d
        outer_turn_radius_meters = pose_estimator.calc_outer_turn_radius(
            steering_angle_radians)
        front_wheel_turn_circumference = 2 * np.pi * config.FRONT_WHEEL_RADIUS_METERS
        turn_circle_circumference = 2 * np.pi * outer_turn_radius_meters
        ticks_required = (config.ENCODER_RESOLUTION_FRONT_WHEEL *
                          turn_circle_circumference / front_wheel_turn_circumference)
        result_loc = []
        ticks = 0
        # Step around one full turn circle in 2 * d equal encoder increments.
        for i in range(2 * d):
            result_loc.append(
                pose_estimator.estimate(time=i,
                                        steering_angle=steering_angle_radians,
                                        encoder_ticks=ticks,
                                        angular_velocity=0))
            ticks = ticks + ticks_required / (2 * d)
        for loc in result_loc:
            visualizer.draw_car(loc[0], loc[1], loc[2])
        visualizer.show()
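Since the test above is human-assisted, pass/fail depends on inspecting the drawn poses. A minimal sketch of how the same check could be asserted automatically, assuming the PoseEstimator constructor, calc_outer_turn_radius, and estimate() interface shown in the snippet (imports and module paths are not shown there); the tolerance is illustrative only:

    def test_turn_returns_to_start(self):
        # Sketch only: automated variant of the circle test above.
        pose_estimator = PoseEstimator((0, 0, 0), 0, 0, 0, 0)
        steering_angle_radians = np.pi / 4
        outer_turn_radius_meters = pose_estimator.calc_outer_turn_radius(
            steering_angle_radians)
        # Ticks needed for the front wheel to roll once around the turn circle.
        ticks_required = (config.ENCODER_RESOLUTION_FRONT_WHEEL *
                          outer_turn_radius_meters / config.FRONT_WHEEL_RADIUS_METERS)
        steps = 8
        for i in range(1, steps + 1):
            x, y, heading = pose_estimator.estimate(
                time=i,
                steering_angle=steering_angle_radians,
                encoder_ticks=i * ticks_required / steps,
                angular_velocity=0)
        # After a full circle the position should be back near the start.
        self.assertTrue(np.allclose((x, y), (0, 0), atol=1e-3))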
Code example #2
File: main.py  Project: midhun-pm/CodeSample
def main():
    # Reads from the data file and runs estimate for each row
    # Then plots the trajectory
    data_array = util.read_csv(config.DATASET_ABSOLUTE_PATH)

    row = data_array[0]
    time, encoder, angular_velocity, steering_angle = np.ravel(row)
    resulting_pos_heading = []
    pose_estimator = PoseEstimator((0, 0, 0), time, encoder, angular_velocity,
                                   steering_angle)
    for row in data_array[1:]:
        time, encoder, angular_velocity, steering_angle = np.ravel(row)
        x, y, heading = pose_estimator.estimate(
            time=time,
            steering_angle=steering_angle,
            encoder_ticks=encoder,
            angular_velocity=angular_velocity)
        resulting_pos_heading.append([x, y, heading])
    trajectory = np.asarray(resulting_pos_heading)
    visualizer.plot_points(trajectory[:, 0], trajectory[:, 1])
    visualizer.show()
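main() unpacks each row as (time, encoder, angular_velocity, steering_angle), but util.read_csv itself is not shown. A minimal stand-in, assuming DATASET_ABSOLUTE_PATH points at a headerless numeric CSV with those four columns in that order:

    # Sketch only: stand-in for util.read_csv, assuming a headerless numeric CSV
    # whose columns are time, encoder, angular_velocity, steering_angle.
    import numpy as np

    def read_csv(path):
        # Shape (n_rows, 4); main() feeds each row to PoseEstimator.estimate().
        return np.genfromtxt(path, delimiter=',')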
Code example #3
        avg = 0.0  # accumulated elapsed inference time over the 10 runs
        for _ in range(10):
            a = time.time()
            sess.run([
                net.get_output(
                    name=last_layer.format(stage=args.stage_level, aux=1)),
                net.get_output(
                    name=last_layer.format(stage=args.stage_level, aux=2))
            ],
                     feed_dict={'image:0': [image]})
            logging.info('inference- elapsed_time={}'.format(time.time() - a))
            avg += time.time() - a
        logging.info('prediction avg= %f' % (avg / 10))

        logging.info('pose+')
        a = time.time()
        humans = PoseEstimator.estimate(heatMat, pafMat)
        logging.info('pose- elapsed_time={}'.format(time.time() - a))
        for human in humans:
            res = write_coco_json(human, args.input_width, args.input_height)
            print(res)

        logging.info('image={} heatMap={} pafMat={}'.format(
            image.shape, heatMat.shape, pafMat.shape))
        process_img = CocoPose.display_image(image,
                                             heatMat,
                                             pafMat,
                                             as_numpy=True)

        # display
        image = cv2.imread(args.imgpath)
        image_h, image_w = image.shape[:2]
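This snippet times each stage by sampling time.time() before and after the call and logging the difference. The same pattern can be packaged in a small context manager; this is only an illustration of the pattern, not part of the project:

    # Sketch only: a timing helper reproducing the elapsed-time logging pattern above.
    import logging
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed(label):
        start = time.time()
        yield
        logging.info('{}- elapsed_time={}'.format(label, time.time() - start))

    # Roughly equivalent usage:
    # with timed('pose'):
    #     humans = PoseEstimator.estimate(heatMat, pafMat)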
Code example #4
    def test_straight_location_estimation(self):
        # Testing the position for straight line motion
        pose_estimator = PoseEstimator((0, 0, 0), 0, 0, 0, 0)
        pose_estimator.estimate(0, 0, 0, 0)
        self.assertEqual((2 * np.pi * config.FRONT_WHEEL_RADIUS_METERS, 0, 0),
                         pose_estimator.estimate(1, 0, 512, 10))
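The expected pose follows from the encoder arithmetic: the 512 ticks passed here appear to correspond to one full front-wheel revolution (i.e. config.ENCODER_RESOLUTION_FRONT_WHEEL is presumably 512), so with a zero steering angle the car advances exactly one wheel circumference while y and heading stay 0. A sketch of that arithmetic with a hypothetical wheel radius:

    # Sketch only: arithmetic behind the expected pose above, assuming 512 encoder
    # ticks correspond to one full front-wheel revolution.
    import numpy as np

    wheel_radius_m = 0.25        # hypothetical; the test uses config.FRONT_WHEEL_RADIUS_METERS
    ticks = 512
    ticks_per_revolution = 512   # assumed value of config.ENCODER_RESOLUTION_FRONT_WHEEL
    distance_m = (ticks / ticks_per_revolution) * 2 * np.pi * wheel_radius_m
    expected_pose = (distance_m, 0, 0)  # straight motion: y and heading remain 0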