Example 1
    def react(self, this_robot, sensor_dump, visualize=False):
        twist = Twist()

        image = sensor_dump.lower_image

        # Relying on only a single forward-pointing ray causes distant pucks
        # to be easily missed (they may be detected, but too sporadically to
        # attract the robot).  We accept as forward-pointing any sensor ray
        # within 'front_angle_threshold' of zero.  Correspondingly set the
        # two predicates 'react_to_puck' and 'react_to_robot'.
        react_to_puck = False
        react_to_robot = False
        for i in range(image.n_rows):
            angle = image.calib_array[i, 4]
            dist = image.calib_array[i, 5]
            if fabs(angle) < self.front_angle_threshold:
                if (image.masks[i] & self.puck_mask) != 0:
                    react_to_puck = True
                if image.masks[i] == ROBOT_MASK:
                    react_to_robot = True

        if react_to_robot:
            react_to_puck = False

        # Now react...
        if react_to_puck:
            # Turn left
            twist.linear = self.linear_speed
            twist.angular = self.angular_speed
            if visualize:
                draw_ray(this_robot, 0, (255, 0, 255))

        elif react_to_robot:
            # Turn left and slow
            twist.linear = self.linear_speed * self.slow_factor
            twist.angular = self.angular_speed

            if visualize:  # Reacting to robot
                draw_ray(this_robot, 0, (255, 0, 0))
        else:
            # Turn right
            twist.linear = self.linear_speed
            twist.angular = -self.angular_speed

            if visualize:
                draw_ray(this_robot, 0, (0, 255, 0))

        return twist
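
A note on Example 1: the comment motivates accepting any ray within front_angle_threshold of the forward axis rather than relying on the single central ray. A minimal, self-contained sketch of that predicate follows; the mask constants and the plain-list scan are assumptions for illustration only, not the simulator's API:

from math import fabs, radians

PUCK_MASK = 0x1    # hypothetical bit values, for illustration only
ROBOT_MASK = 0x2

def forward_cone_hits(angles, masks, front_angle_threshold,
                      puck_mask=PUCK_MASK):
    """Return (react_to_puck, react_to_robot) for rays near the forward axis."""
    react_to_puck = False
    react_to_robot = False
    for angle, mask in zip(angles, masks):
        if fabs(angle) < front_angle_threshold:
            if (mask & puck_mask) != 0:
                react_to_puck = True
            if mask == ROBOT_MASK:
                react_to_robot = True
    # As in the example, a robot in the cone overrides any puck response.
    return (react_to_puck and not react_to_robot), react_to_robot

# A puck at -5 degrees and a robot at +30 degrees, with a 10 degree cone:
print(forward_cone_hits([radians(-5), radians(30)],
                        [PUCK_MASK, ROBOT_MASK], radians(10)))
# -> (True, False)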
Example 2
    def react(self, lscan):

        (cl_dist, cl_angle, cl_mask) = self.get_lmark_dist_angle_mask(lscan)

        draw_circle(self.body.position, 2, (255, 255, 255))

        if cl_angle is not None and cl_dist > 0:
            if cl_mask == ARC_LANDMARK_MASK:
                draw_ray(self, cl_angle, (0, 255, 0), 20, 1)
            elif cl_mask == BLAST_LANDMARK_MASK:
                draw_ray(self, cl_angle + pi, (255, 0, 0), 20, 1)
            elif cl_mask == POLE_LANDMARK_MASK:

                # We'll average all pole landmark responses.
                vx = 0
                vy = 0
                for i in range(len(lscan.ranges)):
                    if (lscan.masks[i] & POLE_LANDMARK_MASK) != 0:
                        vx += cos(lscan.angles[i])
                        vy += sin(lscan.angles[i])
                combined_angle = atan2(vy, vx)

                draw_ray(self, combined_angle, (0, 0, 255), 20, 1)
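
The pole-landmark branch in Example 2 averages bearings by summing unit vectors and taking atan2 of the result, which avoids the wrap-around problem of averaging raw angles. A minimal sketch of that idea on its own:

from math import atan2, cos, sin, radians, degrees

def circular_mean(angles):
    """Average angles (in radians) by summing their unit vectors."""
    vx = sum(cos(a) for a in angles)
    vy = sum(sin(a) for a in angles)
    return atan2(vy, vx)

# The naive mean of +170 and -170 degrees is 0; the vector form gives 180.
print(degrees(circular_mean([radians(170), radians(-170)])))  # ~ +/-180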
Example 3
    def act(self, robot, sensor_dump, visualize=False):
        """ Set wheel speeds according to processed sensor data. """

        image = sensor_dump.lower_image

        # Check if there are no landmarks in view, but there are wall pixels we
        # can use for guidance.
        react_to_wall = False
        react_to_wall_angle = None
        if self.guidance_vec is None:
            closest_wall_angle = None
            closest_wall_range = float('inf')
            for i in range(image.n_rows):
                if image.masks[i] == WALL_MASK:
                    wall_angle = image.calib_array[i, 4]
                    wall_range = image.calib_array[i, 5]
                    if wall_range < closest_wall_range:
                        closest_wall_range = wall_range
                        react_to_wall_angle = wall_angle
                        react_to_wall = True

        # Check for the presence of another robot in the centre of the image.
        react_to_robot = False
        react_to_robot_angle = None
        for i in range(image.n_rows):
            if image.masks[i] == ROBOT_MASK:
                (xr, yr) = image.calib_array[i, 2], image.calib_array[i, 3]
                bot_a = image.calib_array[i, 4]
                bot_r = image.calib_array[i, 5]

                if fabs(bot_a) < pi / 4 and bot_r < self.robot_react_range:
                    react_to_robot = True
                    react_to_robot_angle = bot_a

        twist = Twist()
        if react_to_robot:
            # Turn slow in the open direction
            twist.linear = self.slow_factor * self.linear_speed
            twist.angular = self.angular_speed
            if visualize:
                draw_ray(robot, react_to_robot_angle, color=(255, 0, 0))

        elif react_to_wall:
            if visualize:
                draw_ray(robot, react_to_wall_angle, color=(255, 255, 0))
            twist = self.curve_to_angle(pi / 2, react_to_wall_angle)

        elif self.guidance_vec is None:
            # No landmarks in view AND no walls in view.  Just go straight.
            twist.linear = self.linear_speed

        elif (self.state == "ODO_HOME"
              or (self.odo_condition == "NO_ODO"
                  and self.target_range_bearing != None)):
            (x, y, theta) = sensor_dump.odo_pose
            c = cos(theta)
            s = sin(theta)
            goal_Rx = c * (self.odo_goal_x - x) + s * (self.odo_goal_y - y)
            goal_Ry = -s * (self.odo_goal_x - x) + c * (self.odo_goal_y - y)

            #twist.linear = 0.1 * goal_Rx
            #twist.angular = 0.2 * goal_Ry
            twist.linear = self.linear_speed * sign(goal_Rx)
            twist.angular = self.angular_speed * sign(goal_Ry)

            if visualize:
                draw_segment_wrt_robot(robot, (0, 0), (goal_Rx, goal_Ry),
                                       color=(255, 0, 255),
                                       width=3)

        else:
            twist = self.curve_to_angle(-pi / 2, self.guidance_angle)

        return twist
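
The ODO_HOME branch of Example 3 rotates the world-frame displacement to the odometry goal by -theta, which expresses the goal in the robot's own frame before applying bang-bang control on the signs. A standalone sketch of that transform (the pose convention, world-frame x/y with theta measured counter-clockwise from the x axis, is an assumption consistent with the code above):

from math import cos, sin, pi

def goal_in_robot_frame(pose, goal):
    """Express a world-frame goal point in the robot's local frame."""
    x, y, theta = pose
    gx, gy = goal
    c, s = cos(theta), sin(theta)
    # Rotate the displacement (gx - x, gy - y) by -theta.
    rx = c * (gx - x) + s * (gy - y)
    ry = -s * (gx - x) + c * (gy - y)
    return rx, ry

# Robot at (1, 1) facing along +y (theta = pi/2); a goal at (1, 3) is then
# two units straight ahead of the robot.
print(goal_in_robot_frame((1.0, 1.0, pi / 2), (1.0, 3.0)))  # ~(2.0, 0.0)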
Example 4
    def act(self, robot, sensor_dump, visualize=False):
        """ Set wheel speeds according to processed sensor data. """

        image = sensor_dump.lower_image

        # Check for wall pixels we can use for guidance.  (The original guard
        # that skipped this check while landmarks were in view is disabled.)
        react_to_wall = False
        react_to_wall_angle = None
        # if self.guidance_vec == None:
        closest_wall_angle = None
        closest_wall_range = self.wall_react_range
        for i in range(image.n_rows):
            if image.masks[i] == WALL_MASK:
                wall_angle = image.calib_array[i, 4]
                wall_range = image.calib_array[i, 5]
                if wall_range < closest_wall_range:
                    closest_wall_range = wall_range
                    react_to_wall_angle = wall_angle
                    react_to_wall = True

        # Check for the presence of another robot in the centre of the image.
        react_to_robot = False
        react_to_robot_angle = None
        for i in range(image.n_rows):
            if image.masks[i] == ROBOT_MASK:
                (xr, yr) = image.calib_array[i, 2], image.calib_array[i, 3]
                bot_a = image.calib_array[i, 4]
                bot_r = image.calib_array[i, 5]

                if fabs(bot_a) < pi / 4 and bot_r < self.robot_react_range:
                    react_to_robot = True
                    react_to_robot_angle = bot_a

        # Quantity referenced in a couple of places below:
        if self.guidance_vec is not None:
            dist_to_lmarks = sqrt(self.guidance_vec[0]**2 +
                                  self.guidance_vec[1]**2)

        twist = Twist()
        action_print = None
        if react_to_robot:
            # Turn slow in the open direction
            twist.linear = self.slow_factor * self.linear_speed
            twist.angular = self.angular_speed
            action_print = "ROBOT"
            if visualize:
                draw_ray(robot, react_to_robot_angle, color=(255, 0, 0))

        elif react_to_wall:
            action_print = "WALL"
            if visualize:
                draw_ray(robot, react_to_wall_angle, color=(255, 0, 255))
            #twist = self.curve_to_angle(pi/2, react_to_wall_angle, self.wall_turn_on_spot)
            twist.linear = uniform(-0.1, 0.1) * self.linear_speed
            twist.angular = -self.angular_speed

        elif self.guidance_vec is None:
            # No landmarks in view AND no walls in view.  Just go straight.
            twist.linear = self.linear_speed
            action_print = "STRAIGHT (NO LANDMARKS OR WALLS IN VIEW)"

        elif (self.target_pos is not None
              and dist_to_lmarks > self.puck_dist_threshold):
            ##twist.linear = self.linear_speed * sign(self.target_pos[0])
            ##twist.angular = self.angular_speed * sign(self.target_pos[1])

            #twist.linear = self.linear_speed * 1.0 * self.target_pos[0]
            #twist.angular = self.angular_speed * 20.0 * self.target_pos[1]
            #twist = self.cap_twist(twist)

            angle = atan2(self.target_pos[1], self.target_pos[0])
            twist.linear = self.linear_speed * cos(angle)
            twist.angular = self.angular_speed * sin(angle)

            action_print = "TARGETING"
            if visualize:
                draw_segment_wrt_robot(robot, (0, 0), 
                                       (self.target_pos[0], self.target_pos[1]),
                                       color=(255, 255, 255), width=3)
        else:

            # FIRST METHOD: NO CONTROL OVER DISTANCE
            #twist = self.curve_to_angle(-pi/2, self.guidance_angle, self.no_target_turn_on_spot)

            # SECOND METHOD: BANG-BANG CONTROL OVER DISTANCE BY CURVING IN OR OUT
            # BY A FIXED ANGLE 
            """
            dist = sqrt(self.guidance_vec[0]**2 + self.guidance_vec[1]**2)
            gamma = self.curve_gamma
            twist = self.curve_to_angle(-pi/2 + gamma, self.guidance_angle)
            if dist < self.lmark_ideal_range:
                twist = self.curve_to_angle(-pi/2 - gamma, self.guidance_angle)
                action_print = "CURVING OUT"
                if visualize:
                    draw_ray(robot, gamma, color=(255,0,0))

            else:
                twist = self.curve_to_angle(-pi/2 + gamma, self.guidance_angle)
                action_print = "CURVING IN"
                if visualize:
                    draw_ray(robot, -gamma, color=(0,255,0))                    
            """

            dist = dist_to_lmarks
            ideal_dist = self.lmark_ideal_range
            # We want to use proportional control to keep the robot at
            # self.lmark_ideal_range away from the landmarks, while facing
            # orthogonal to them.  So we need an error signal for distance.
            # The most simpleminded such signal is this one:
            #       dist_error = ideal_dist - dist
            # But its unknown scale presents a problem.  So instead we use the 
            # following which lies in the range [-1, 1]:
            dist_error = (ideal_dist - min(dist, 2 * ideal_dist)) / ideal_dist
            # One consequence of using this error signal is that the response
            # at a distance greater than 2*ideal_dist is the same as at
            # 2*ideal_dist.

            # But we also have to consider the current angle w.r.t. the
            # guidance angle.  Ideally, the guidance angle should be at -pi/2
            # w.r.t. the robot's forward axis when dist_error == 0.  If
            # dist_error is positive (meaning we are too close to the
            # landmarks) then the ideal guidance angle should be in the range
            # [-pi, -pi/2].  Otherwise, the ideal guidance angle should be in
            # the range [0, -pi/2].  We will use dist_error to scale linearly
            # within these ranges as we compute the ideal guidance angle.
            pi_2 = pi / 2.0
            ideal_guidance_angle = -pi_2 - pi_2 * dist_error
            action_print = "PROPORTIONAL CONTROL"

            # Now define the angle_error
            angle_error = get_smallest_signed_angular_difference(
                                                        self.guidance_angle,
                                                        ideal_guidance_angle)

            twist = self.diff_to_twist_bang_bang(angle_error)

        landmark_print = None
        if self.closest_pair is not None:
            landmark_print = "PAIR"
            if visualize:
                draw_segment_wrt_robot(robot, self.closest_pair[0],
                    self.closest_pair[1], color=(255, 255, 255), width=3)
                draw_segment_wrt_robot(robot, (0, 0), self.guidance_vec,
                    color=(0, 255, 255), width=3)
        elif self.guidance_vec is not None:
            landmark_print = "SINGLE"
            if visualize:
                draw_segment_wrt_robot(robot, (0, 0), self.guidance_vec,
                    color=(255, 255, 0), width=3)
        else:
            landmark_print = "NO LANDMARK"

        if visualize:
            print("{}: {}".format(landmark_print, action_print))

        return twist
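
The "PROPORTIONAL CONTROL" branch of Example 4 builds a distance error bounded to [-1, 1], maps it to an ideal guidance angle centred on -pi/2, and then converts the residual angle error into wheel speeds. The helpers below sketch those two steps in isolation; the atan2 form of the angular difference is a common implementation and only an assumption about what get_smallest_signed_angular_difference does:

from math import atan2, cos, sin, pi

def smallest_signed_angular_difference(a, b):
    """Signed difference from a to b, wrapped to [-pi, pi]."""
    return atan2(sin(b - a), cos(b - a))

def ideal_guidance_angle(dist, ideal_dist):
    """Distance error in [-1, 1] mapped to a desired guidance angle:
    at the ideal range -> -pi/2 (travel parallel to the landmarks),
    far too close      -> -pi   (curve away),
    at or beyond 2x    ->  0    (curve back in)."""
    dist_error = (ideal_dist - min(dist, 2 * ideal_dist)) / ideal_dist
    return -pi / 2 - (pi / 2) * dist_error

print(ideal_guidance_angle(100.0, 100.0))  # -pi/2: hold the current range
print(ideal_guidance_angle(50.0, 100.0))   # -3*pi/4: curve away
print(ideal_guidance_angle(300.0, 100.0))  # 0.0: curve back in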
Example 5
    def act(self, robot, sensor_dump, visualize=False):
        """ Set wheel speeds according to processed sensor data. """

        image = sensor_dump.lower_image

        # Check for wall pixels we can use for guidance.  (The original guard
        # that skipped this check while landmarks were in view is disabled.)
        react_to_wall = False
        react_to_wall_angle = None
        # if self.guidance_vec == None:
        closest_wall_angle = None
        closest_wall_range = self.wall_react_range
        for i in range(image.n_rows):
            if image.masks[i] == WALL_MASK:
                wall_angle = image.calib_array[i, 4]
                wall_range = image.calib_array[i, 5]
                if wall_range < closest_wall_range:
                    closest_wall_range = wall_range
                    react_to_wall_angle = wall_angle
                    react_to_wall = True

        # Check for the presence of another robot in the centre of the image.
        react_to_robot = False
        react_to_robot_angle = None
        for i in range(image.n_rows):
            if image.masks[i] == ROBOT_MASK:
                (xr, yr) = image.calib_array[i, 2], image.calib_array[i, 3]
                bot_a = image.calib_array[i, 4]
                bot_r = image.calib_array[i, 5]

                if fabs(bot_a) < pi / 4 and bot_r < self.robot_react_range:
                    react_to_robot = True
                    react_to_robot_angle = bot_a

        twist = Twist()
        if react_to_robot:
            # Turn slow in the open direction
            twist.linear = self.slow_factor * self.linear_speed
            twist.angular = self.angular_speed
            if visualize:
                print("ROBOT")
                draw_ray(robot, react_to_robot_angle, color=(255, 0, 0))

        elif react_to_wall:
            if visualize:
                print("WALL")
                draw_ray(robot, react_to_wall_angle, color=(255, 255, 0))
            #twist = self.curve_to_angle(pi/2, react_to_wall_angle, self.wall_turn_on_spot)
            twist.linear = uniform(-0.1, 0.1) * self.linear_speed
            twist.angular = -self.angular_speed

        elif self.guidance_vec is None:
            # No landmarks in view AND no walls in view.  Just go straight.
            twist.linear = self.linear_speed
            if visualize:
                print("STRAIGHT (NO LANDMARKS OR WALLS IN VIEW)")

        elif self.target_pos is not None:
            twist.linear = self.linear_speed * sign(self.target_pos[0])
            twist.angular = self.angular_speed * sign(self.target_pos[1])
            if visualize:
                print("TARGETING")
                draw_segment_wrt_robot(
                    robot, (0, 0), (self.target_pos[0], self.target_pos[1]),
                    color=(255, 255, 255),
                    width=3)
        else:

            #twist = self.curve_to_angle(-pi/2, self.guidance_angle, self.no_target_turn_on_spot)
            dist = sqrt(self.guidance_vec[0]**2 + self.guidance_vec[1]**2)
            if dist < self.lmark_ideal_range:
                twist = self.curve_to_angle(-pi / 2 - 0.25,
                                            self.guidance_angle,
                                            self.no_target_turn_on_spot)
                if visualize:
                    print("CURVING OUT")
            else:
                twist = self.curve_to_angle(-pi / 2 + 0.25,
                                            self.guidance_angle,
                                            self.no_target_turn_on_spot)
                if visualize:
                    print("CURVING IN")

        return twist
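
The final branch of Example 5 is a bang-bang version of the distance controller developed in Example 4: the robot always tries to hold the guidance vector near -pi/2 (orbiting the landmarks), but biases the target heading outward or inward by a fixed 0.25 rad offset depending on which side of lmark_ideal_range it is on. The selection step, sketched on its own (curve_to_angle is the example's own helper and is not reproduced here):

from math import hypot, pi

def orbit_heading(guidance_vec, ideal_range, gamma=0.25):
    """Desired bearing of the guidance vector relative to the robot:
    -pi/2 - gamma drifts outward when too close,
    -pi/2 + gamma drifts inward when too far."""
    dist = hypot(guidance_vec[0], guidance_vec[1])
    if dist < ideal_range:
        return -pi / 2 - gamma   # curve out
    return -pi / 2 + gamma       # curve in

print(orbit_heading((30.0, 40.0), ideal_range=100.0))   # too close -> ~-1.82
print(orbit_heading((120.0, 90.0), ideal_range=100.0))  # too far   -> ~-1.32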
Example 6
    def react(self, this_robot, sensor_suite, visualize=False):
        twist = Twist()

        scan = sensor_suite.range_scan
        n = len(scan.ranges)
        centre_angle = 0

        # EXPERIMENT TO ACHIEVE A MINIMUM SIZE AGGREGATE.
        """
        lscan = sensor_suite.landmark_scan
        closest_lmark_distance = float('inf')
        for i in range(len(lscan.ranges)):
            if (lscan.masks[i] & ARC_LANDMARK_MASK != 0
               and lscan.ranges[i] < closest_lmark_distance):
                closest_lmark_distance = lscan.ranges[i]
        if closest_lmark_distance < 200:
            centre_angle = 0.3
        """

        # Relying on only a single forward-pointing ray causes distant pucks
        # to be easily missed (they may be detected, but too sporadically to
        # attract the robot).  We accept as forward-pointing any sensor ray
        # within 'front_angle_threshold' of zero.  Correspondingly set the
        # two predicates 'react_to_puck' and 'react_to_robot'.
        react_to_puck = False
        react_to_robot = False
        for i in range(n):
            if fabs(scan.angles[i] +
                    centre_angle) < self.front_angle_threshold:
                if (scan.masks[i] & self.puck_mask) != 0:
                    react_to_puck = True
                if scan.masks[i] == ROBOT_MASK:
                    react_to_robot = True

        if react_to_robot:
            react_to_puck = False

        # Now react...
        if react_to_puck:
            print("PUCK")
            # Turn left
            twist.linear = self.linear_speed
            twist.angular = self.angular_speed
            if visualize:
                draw_ray(this_robot, 0, (255, 0, 255))

        elif react_to_robot:
            print("ROBOT")
            # Turn left and slow
            twist.linear = self.linear_speed * self.slow_factor
            twist.angular = self.angular_speed

            if visualize:  # Reacting to robot
                draw_ray(this_robot, 0, (255, 0, 0))
        else:
            print("NO PUCK/ROBOT -RIGHT")
            # Turn right
            twist.linear = self.linear_speed
            twist.angular = -self.angular_speed

            if visualize:
                draw_ray(this_robot, 0, (0, 255, 0))

        return twist
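
All of the examples above construct and return a Twist with scalar linear and angular fields, in the spirit of the ROS geometry message but flattened to two numbers, and a few branches call a sign helper. As an assumption about the surrounding framework (not its actual definitions), minimal stand-ins that make the snippets testable in isolation could look like this:

class Twist:
    """Minimal stand-in: forward speed and turn rate as plain scalars."""
    def __init__(self, linear=0.0, angular=0.0):
        self.linear = linear
        self.angular = angular

    def __repr__(self):
        return "Twist(linear={:.3f}, angular={:.3f})".format(
            self.linear, self.angular)

def sign(x):
    """Return -1, 0, or 1 according to the sign of x."""
    return (x > 0) - (x < 0)

print(Twist(linear=1.0, angular=-0.5))  # Twist(linear=1.000, angular=-0.500)
print(sign(-3.2), sign(0.0), sign(7))   # -1 0 1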