Example #1
def process_tunnel2(lidar_data):
    global LIDAR_FLAG
    if LIDAR_FLAG is False:
        return
    global DIRECTION
    global IS_TURNING

    TURTLE.enable_fish = False
    set_lidar_values(lidar_data)
    TURTLE.set_speed_smooth("fast")
    directions = ["front", "leftside", "rightside"]
    distances = {}
    for direction in directions:
        distances[direction] = get_object_distance(direction)

    if not IS_TURNING:
        print(distances["leftside"], distances["rightside"])
        if distances["leftside"] < 0.45 and distances["leftside"] > 0:
            TURTLE.turn("right", 1.5)
            DIRECTION = "left"
            IS_TURNING = True
        elif distances["rightside"] < 0.45 and distances["rightside"] > 0:
            TURTLE.turn("left", 1.5)
            DIRECTION = "right"
            IS_TURNING = True
    else:
        print(get_object_distance("front"))
        if get_object_distance("front") == 0:
            rospy.signal_shutdown("[TURTLE] shutting down...")
        else:
            TURTLE.turn(DIRECTION, 1.5)
        # TURTLE.turn(DIRECTION, 1.5)
        IS_TURNING = False
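
The snippets in this collection call set_lidar_values() and get_object_distance() without showing them. The following is only a minimal sketch of what such helpers might look like, assuming the cached LaserScan has roughly one range reading per degree and that each direction name maps to a fixed angular window; the window bounds and the LIDAR_WINDOWS name are illustrative assumptions, not taken from the original code.

# Hypothetical sketch of the lidar helpers used throughout these examples.
LIDAR_RANGES = None

LIDAR_WINDOWS = {            # assumed angular windows, in degrees
    "front": (-10, 10),
    "leftside": (80, 100),
    "rightside": (260, 280),
}

def set_lidar_values(lidar_data):
    """Cache the latest LaserScan ranges for later distance queries."""
    global LIDAR_RANGES
    LIDAR_RANGES = list(lidar_data.ranges)

def get_object_distance(direction):
    """Return the closest valid reading in the direction's window, or 0 if none."""
    if LIDAR_RANGES is None or direction not in LIDAR_WINDOWS:
        return 0
    start, end = LIDAR_WINDOWS[direction]
    readings = [LIDAR_RANGES[i % len(LIDAR_RANGES)] for i in range(start, end + 1)]
    valid = [r for r in readings if r > 0 and r != float("inf")]
    return min(valid) if valid else 0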
Example #2
def process_tunnel2():
    global DIRECTION
    global IS_TURNING
    global IS_INSIDE_TUNNEL

    TURTLE.set_speed_smooth("fast")
    directions = ["front", "leftside", "rightside"]
    distances = {}
    for direction in directions:
        distances[direction] = get_object_distance(direction)

    if not IS_INSIDE_TUNNEL:
        distance = get_object_distance("right")
        if (distance < 0.3) and (distance > 0):
            IS_INSIDE_TUNNEL = True
            TURTLE.enable_fish = False
    elif not IS_TURNING:
        rospy.loginfo("\n[PROC] tunnel: " + distances["leftside"] +
                      str("   ") + distances["rightside"])
        if distances["leftside"] < 0.45 and distances["leftside"] > 0:
            TURTLE.turn("right", 1.5)
            DIRECTION = "left"
            IS_TURNING = True
        elif distances["rightside"] < 0.45 and distances["rightside"] > 0:
            TURTLE.turn("left", 1.5)
            DIRECTION = "right"
            IS_TURNING = True
    else:
        print(get_object_distance("front"))
        if get_object_distance("front") == 0:
            rospy.signal_shutdown("[TURTLE] shutting down...")
        else:
            TURTLE.turn(DIRECTION, 1.5)
        # TURTLE.turn(DIRECTION, 1.5)
        IS_TURNING = False
Example #3
def process_frontcam(image):
    """ process the frontcam image """
    if not SCHEDULER.is_frontcam_enable():
        return

    STATE = SCHEDULER.get_state()
    info = EYE.see_front(image)

    if SCHEDULER.debug_option["show_front_info"]:
        rospy.logdebug(info)

    if STATE == "default":
        # if EYE.is_boostable(image):
        process_acceleration(info)
        # signal = is_construction(image)
        # rospy.logdebug(signal)

    if STATE == "traffic_light":
        if is_light_green(image):
            TURTLE.enable()
            SCHEDULER.set_state("to_intersection")
        return

    if STATE == "to_intersection":
        signal = check_left_right_sign(image)
        if signal == "right":
            SCHEDULER.set_state("intersection_right")
        elif signal == "left":
            SCHEDULER.set_state("intersection_left")
        return

    if STATE == "intersection_right":
        # TODO: make algorithms for right
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    if STATE == "intersection_left":
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    if STATE == "to_construction":
        if EYE.is_boostable(image):
            TURTLE.boost()

        if is_construction(image):
            SCHEDULER.set_state("construction_searching")

    if STATE == "construction_searching":
        pass
Example #4
def process_tunnel():
    global DIRECTION

    TURTLE.set_speed_smooth("fast")
    directions = ["front", "leftside", "rightside"]
    min_distance = 1000
    for direction in directions:
        distance = get_object_distance(direction)
        if distance > 0 and distance < min_distance:
            min_distance = distance
    # distance = min(get_object_distance("front"), get_object_distance("leftside"), get_object_distance("rightside"))
    # distance = get_object_distance("front")
    rospy.loginfo("min_distance: {}  last: {}".format(min_distance, distance))
    if min_distance < 0.50 and min_distance > 0:
        TURTLE.turn(DIRECTION, 2.03, True, True)
        if DIRECTION == "left":
            DIRECTION = "right"
        elif DIRECTION == "right":
            DIRECTION = "left"
Example #5
def process_lidar(lidar_data):
    """ process the lidar image """
    if not SCHEDULER.is_lidar_enable():
        return
    if SCHEDULER.is_lidar_occupied() or TURTLE.is_occupied():
        return

    state = SCHEDULER.get_state()
    if SCHEDULER.debug_option["show_timer"] and (state != "construction"):
        SCHEDULER.check_time("lidar", min=0.4, stop_when_delay=False)

    set_lidar_values(lidar_data)
    state = SCHEDULER.get_state()

    front = get_object_distance("front")

    if (front < 0.15) and (front > 0):
        TURTLE.stop()
        return

    if state is "default":
        leftside = get_object_distance("leftside")
        print(leftside)
        if (leftside < 0.35) and (leftside > 0):
            rospy.Timer(rospy.Duration(5),
                        SCHEDULER.release_lidar_occupied,
                        oneshot=True)
            SCHEDULER.set_state("to_construction")
            return
    elif state is "to_construction":
        leftside = get_object_distance("leftside")
        print("to_construction: " + str(leftside))
        if (leftside < 0.35) and (leftside > 0):
            SCHEDULER.set_state("construction")
            return
        # rospy.Timer(
        #     rospy.Duration(0.15), SCHEDULER.release_lidar_occupied, oneshot=True
        # )
        # process_construction()
    elif state is "construction":
        process_construction()
    elif state is "parking":
        process_parking()
Example #6
def process_blocking():
    global CURRENT_STATE
    global SEEN_BLOCKING_BAR

    directions = ["front", "leftside", "rightside"]
    distances = {}
    rospy.loginfo("-------------")
    for direction in directions:
        distances[direction] = get_object_distance(direction)
        rospy.loginfo("\n[PROC] " + str(direction) + ": " + distances[direction])
    rospy.loginfo("-------------")

    if distances["rightside"] < 0.3 and distances["rightside"] > 0:
        TURTLE.enable_fish = False
        TURTLE.go_forward(2)
        SEEN_BLOCKING_BAR = True
    elif SEEN_BLOCKING_BAR:
        if distances["front"] > 0 or distances["leftside"] > 0 or distances["rightside"] > 0:
            TURTLE.enable_fish = True
            CURRENT_STATE = "tunnel"
        print_state()
    else:
        rospy.Timer(rospy.Duration(5.0), go_anyway, oneshot=True, reset=True)
Example #7
def process_lidar(lidar_data):
    """ process the lidar image """
    if not SCHEDULER.is_lidar_enable():
        return
    if SCHEDULER.is_lidar_occupied() or TURTLE.is_occupied():
        return

    state = SCHEDULER.get_state()
    if SCHEDULER.debug_option["show_timer"] and (state != "construction"):
        SCHEDULER.check_time("lidar", min=0.4, stop_when_delay=False)

    set_lidar_values(lidar_data)
    state = SCHEDULER.get_state()

    front = get_object_distance("front")
    if (front < 0.15) and (front > 0):
        TURTLE.stop()
        return

    # if STEP >= 10:
    #     process_parking()
    # else:
    #     process_construction()
    # return

    if state is "intersection_left":
        left = get_object_distance("left")
        if (left > 0) and (left < 0.50):
            TURTLE.set_speed("slow")
            SCHEDULER.set_state("to_construction")
    elif state is "to_construction":
        rospy.Timer(rospy.Duration(0.15),
                    SCHEDULER.release_lidar_occupied,
                    oneshot=True)
        process_construction_new()
    elif state is "construction":
        process_construction_new()
    elif state is "parking":
        process_parking()
    elif state is "tunnel":
        process_tunnel()
Example #8
def initialize():
    """ initialize processing """
    EYE.calibrate()
    TURTLE.set_speed('normal')
    if CURRENT_STATE is "traffic_light":
        TURTLE.disable()
    else:
        TURTLE.enable()
Example #9
def process_tunnel(lidar_data):
    global LIDAR_FLAG
    if LIDAR_FLAG is False:
        return
    global DIRECTION
    TURTLE.enable_fish = False
    set_lidar_values(lidar_data)
    TURTLE.set_speed_smooth("fast")
    directions = ["front", "leftside", "rightside"]
    min_distance = 1000
    for direction in directions:
        distance = get_object_distance(direction)
        if distance > 0 and distance < min_distance:
            min_distance = distance
    # distance = min(get_object_distance("front"), get_object_distance("leftside"), get_object_distance("rightside"))
    # distance = get_object_distance("front")
    rospy.loginfo("min_distance: {}  last: {}".format(min_distance, distance))
    if min_distance < 0.50 and min_distance > 0:
        TURTLE.turn(DIRECTION, 2.03, True, True)
        if DIRECTION == "left":
            DIRECTION = "right"
        elif DIRECTION == "right":
            DIRECTION = "left"
Example #10
def process_acceleration(info):
    if not info:
        return

    horizon = info['horizon_position']
    center = abs(info['center'])

    if EYE.get_front_state() == "straight":
        if horizon < 200 and center < 50:
            TURTLE.boost()
            if SCHEDULER.debug_option["show_front_info"]:
                rospy.logwarn("[PROC] Boosting")
        elif horizon < 260 and center < 50:
            TURTLE.set_speed_smooth('little')
    if horizon > 320 or info['state'] == 'turning':
        TURTLE.set_speed('normal')
    elif horizon > 260:
        TURTLE.set_speed_smooth('normal')
Example #11
def test_frontcam(image):
    """ process the frontcam image """
    # print("image is subscribed")
    if not SCHEDULER.is_frontcam_enable():
        return

    if not EYE.is_front_occupied():

        STATE = SCHEDULER.get_state()

        if STATE == "traffic_light":
            signal = is_light_green(image)
            rospy.logdebug("[PROC] is_light_green: {}"
                           .format(signal))
            if signal:
                SCHEDULER.set_state("to_intersection")
                TURTLE.enable()
                TURTLE.set_speed("fast")
                TURTLE.set_speed_smooth("normal")

        elif STATE == "to_intersection":
            signal = check_left_right_sign(image)
            rospy.logdebug("[PROC] left or right: {}"
                           .format(signal))
            if signal == "right":
                SCHEDULER.set_state("intersection_right")
            elif signal == "left":
                SCHEDULER.set_state("intersection_left")
        if SCHEDULER.debug_option["show_center_slope"]:
            SCHEDULER.check_time("frontcam", min=0.5)
        info = EYE.see_front(image)
        if info is None:
            return

        # rospy.logdebug("[PROC] test_frontcam: state {}  {}".format(
        #     info["state"], info["horizon_position"]))
        # if (info["horizon_position"] < 150):
        #     TURTLE.set_speed("fast")
        #     TURTLE.set_speed_smooth("normal")

        if (info["state"] == "turning") and (info["turning_to"] is not "None"):
            rospy.logdebug(
                "[PROC] turn off for 1.5s")
            rospy.Timer(rospy.Duration(1.5),
                        EYE.release_front_occupied, oneshot=True)
        else:
            rospy.Timer(rospy.Duration(0.1),
                        EYE.release_front_occupied, oneshot=True)
Example #12
def process_frontcam(image):
    """ process the frontcam image """
    if not SCHEDULER.is_frontcam_enable():
        return

    state = SCHEDULER.get_state()
    info = EYE.see_front(image)

    if SCHEDULER.debug_option["show_front_info"]:
        rospy.logdebug(info)

    if state == "default":
        # if EYE.is_boostable(image):
        process_acceleration(info)
        # SCHEDULER.set_state("to_intersection")
        # signal = is_construction(image)
        # rospy.logdebug(signal)
        # if is_construction(image):
        #     TURTLE.boost()
        #     SCHEDULER.set_state("construction")

    if state == "traffic_light":
        if is_light_green(image):
            TURTLE.enable()
            SCHEDULER.set_state("to_intersection")
        return

    # NOTE: temporary settings:
    if state == "to_intersection":
        # rospy.Timer(rospy.Duration(35), SCHEDULER.enable_lidar, oneshot=True)
        SCHEDULER.set_state("intersection_left")

    # if state == "to_intersection":
    #     signal = check_left_right_sign(image)
    #     if signal == "right":
    #         SCHEDULER.set_state("intersection_right")
    #     elif signal == "left":
    #         SCHEDULER.set_state("intersection_left")
    #     return

    if state == "intersection_right":
        # TODO: make algorithms for right
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    # NOTE: temporary settings:
    if state == "intersection_left":
        # if EYE.is_boostable(image):
        #     TURTLE.boost()
        #     SCHEDULER.set_state("to_construction")
        return

    if state == "to_construction":
        # if EYE.is_boostable(image):
        # TURTLE.boost()
        EYE.check_yellow = True
        # if is_construction(image):
        # SCHEDULER.set_state("construction_searching")

    if state == "construction_searching":
        pass
Example #13
def process_construction_new():
    """ process construction state """
    global STEP
    global NUM_OBSTRUCTION
    global LANE_TO
    global BUF_ANGULAR
    global BUF_SIZE

    if TURTLE.is_occupied():
        return

    if STEP == 0:
        # TURTLE.set_speed("normal")
        leftside = get_object_distance("leftside")
        left = get_object_distance("left")

        if leftside > 0:
            rospy.logdebug("[PROC] LIDAR LEFTSIDE: {}".format(leftside))

        if (leftside > 0) and (leftside < 0.50) and (left > 1.00):
            EYE.check_yellow = False
            SCHEDULER.set_state("construction")
            rospy.loginfo("[PROC] construction state started.")

            STEP = 1
            rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
            TURTLE.go_forward(3.5)
            return
        else:
            return

    elif STEP == 1:
        TURTLE.set_speed("normal")
        TURTLE.set_speed_smooth("slow")
        TURTLE.turn(0.13, 0)

        left = get_object_distance("left")
        leftback = get_object_distance("leftback")
        rospy.logdebug("[PROC] LIDAR LEFT: {:.2f} LEFTBACK: {:.2f}}".format(
            left, leftback))
        if (left > 0) and (left < 0.50):
            return
        else:
            TURTLE.set_speed("slow")
            if (leftback > 0.5):
                # TURTLE.go_forward(2.5)
                STEP = 3
                rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
            return

    elif STEP == 2:
        # TODO: write code for first left lane
        pass

    elif STEP == 3:
        TURTLE.set_speed("normal")
        TURTLE.set_speed_smooth("stop")
        front = get_object_distance("front")
        right_biased = get_object_distance("right_biased")

        if (front > 1.0) and (right_biased < 1.0) and (right_biased > 0.0):
            print("passed", front, right_biased)
            print("BUF_SIZE: ", BUF_SIZE)
            BUF_SIZE = 15
            reset_buffer()
            TURTLE.set_speed_smooth("slow")
            pass
        elif front * right_biased == 0:
            return
        else:
            TURTLE.set_speed("stop")
            print("turning...", front, right_biased)
            TURTLE.turn(0.13, 1.0)
            # rospy.sleep(rospy.Duration(5.0))
            return
        # TURTLE.go_turn("left")

    elif STEP == 4:
        right_biased = get_object_distance("right_biased")
        left_biased = get_object_distance("left_biased")
        front = get_object_distance("front")

        if right_biased == 0.0:
            right_biased = 3.0
        if left_biased == 0.0:
            left_biased = 3.0
        if front == 0.0:
            front = 3.0
        elif front < 0.2:
            TURTLE.set_speed_smooth("stop")
        else:
            TURTLE.set_speed_smooth("slow")

        min_distance = min(right_biased, left_biased)

        degree = 0
        if (front < 1.0):
            degree += max(pow(1.0 - front, 2), 0)
        else:
            degree += max(0.5 - min_distance, 0) * 3
        # if min_distance < 0.5:
        # degree += max((0.5 - min_distance), 0) * 1.5
        # elif (min_distance > 1.0) and (min_distance < 3.0):
        # degree = 0.2

        if (left_biased == min_distance) and (min_distance < 0.5):
            degree *= -1

        # max_distance = max(right_biased, left_biased)
        # if (left_biased == max_distance):
        #     degree *= -1

        # degree = 0
        # if min_distance > 0 and min_distance < 0.5:
        #     if right_biased > left_biased:
        #         degree = (0.5 - min_distance) * (-7)
        #         LANE_TO = "right"
        #     elif right_biased < left_biased:
        #         degree = (0.5 - min_distance) * (7)
        #         LANE_TO = "left"

        # if is_left_crashable():
        #     degree = -1.7
        # elif is_right_crashable():
        #     degree = 1.7

        degree *= 3
        degree = max(min(degree, 2.0), -2.0)
        BUF_ANGULAR.append(degree)
        degree -= BUF_ANGULAR.pop(0)
        print("BUF_ANGULAR:", BUF_ANGULAR)
        # if degree != 0:
        #     BUF_ANGULAR.append(degree)

        # elif len(BUF_ANGULAR) > 9:
        #     STEP = 5

        if SCHEDULER.debug_option["show_construction_lidar"]:
            rospy.logdebug(
                "[PROC] r_based: {:.2f}  l_based: {:.2f}  min: {:.2f}  front: {:.2f}  deg: {:.2f}"
                .format(right_biased, left_biased, min_distance, front,
                        degree))

        TURTLE.turn(0.13, degree)
        return

    elif STEP == 5:
        print("[StEP 5]")
        if len(BUF_ANGULAR) > 0:
            TURTLE.turn(0.13, -BUF_ANGULAR.pop(0))
            return
        else:
            front = get_object_distance("front")
            print(front)
            # if (front > 0) and (front < 1.0):
            #     if LANE_TO == "right":
            #         TURTLE.turn(0.13, 0.8)
            #     else:
            #         TURTLE.turn(0.13, -0.8)
            #     return
            # else:
            if NUM_OBSTRUCTION < 1:
                NUM_OBSTRUCTION += 1
                STEP = 4
                return

    elif STEP == 6:
        TURTLE.go_forward(1)
        TURTLE.set_speed("normal")
        TURTLE.go_turn("left")
        TURTLE.set_speed("normal")
        TURTLE.set_speed("fast")
        TURTLE.set_speed_smooth("normal")
        TURTLE.go_forward(5)

        STEP = 10
        SCHEDULER.set_state("parking")
        BUF_SIZE = 3
        reset_buffer()
        return

    STEP += 1
    rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
Example #14
#!/usr/bin/env python3

import rospy
# from std_msgs.msg import String
from sensor_msgs.msg import CompressedImage, LaserScan
import processor
from constants import PATH_RASPICAM, PATH_USBCAM, PATH_LIDAR, PATH_GALAPAGOS_STATE
from turtlebot import TURTLE
from scheduler import SCHEDULER  # ! not used in contest

TURTLE.set_speed("normal")
# TURTLE.disable()
# TURTLE.set_speed("fast")
# TURTLE.set_speed_smooth("normal")

# SCHEDULER.set_state("traffic_light")
SCHEDULER.set_state("default")
# SCHEDULER.set_state("to_intersection")
# SCHEDULER.set_state("construction")
# SCHEDULER.set_state("parking")

rospy.Subscriber(PATH_USBCAM,
                 CompressedImage,
                 processor.process_subcam,
                 queue_size=1)
# rospy.Subscriber(PATH_LIDAR, LaserScan,
#  processor.test_block_escaping, queue_size=1)
# rospy.Subscriber(PATH_LIDAR, LaserScan,
#  processor.process_lidar, queue_size=1)
# rospy.Subscriber(PATH_RASPICAM, CompressedImage,
#                  processor.process_frontcam, queue_size=1)
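
This run module only sets the scheduler state and registers the subcam subscriber; rospy requires rospy.init_node() to have been called before any rospy.Subscriber() is created, and nothing here blocks to keep the callbacks serviced (in this project that loop lives in the entry-point script of Example #28). A minimal sketch of a standalone bootstrap, assuming no imported module already initializes the node and using an arbitrary node name:

# Sketch only: standalone bootstrap (node name "galapagos_run" is an assumption).
def _standalone_main():
    rospy.init_node("galapagos_run", anonymous=True)
    rospy.Subscriber(PATH_USBCAM,
                     CompressedImage,
                     processor.process_subcam,
                     queue_size=1)
    rospy.spin()  # block here so the subscriber callback keeps being serviced


if __name__ == "__main__":
    _standalone_main()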
Example #15
def process_lidar(lidar_data):
    """ process the lidar data """
    global CURRENT_STATE
    global IS_TURNING
    global STATE_CONSTRUCTION
    global DIRECTION
    global TURN_COUNT
    global ENABLE_LIDAR
    global ENABLE_FRONT

    if not ENABLE_LIDAR:
        return

    set_lidar_values(lidar_data)

    if CURRENT_STATE is "construction":
        if not TURTLE.is_settable():
            return

        rospy.loginfo(
            str(STATE_CONSTRUCTION) + "    " +
            str(get_object_distance('left')) + "   " +
            str(get_object_distance('front')))

        if STATE_CONSTRUCTION == "searching":
            distance = get_object_distance("left")
            if distance == 0:
                return
            elif distance < 0.35:
                STATE_CONSTRUCTION = "ready"
            elif distance > 0.35 and distance < 1:
                STATE_CONSTRUCTION = "start"
                DIRECTION = "right"
                TURTLE.enable_fish = False
                TURTLE.turn("left", 2.03, True)
            return

        elif STATE_CONSTRUCTION == "ready":
            distance = get_object_distance("front")
            if distance == 0:
                return
            elif distance < 0.5:
                # TURTLE.change_line("left", 0.5)
                DIRECTION = "left"
                TURTLE.enable_fish = False
                TURTLE.change_line("left", 1)
                # TODO: FIX change_line()
                STATE_CONSTRUCTION = "start"
            return

        elif STATE_CONSTRUCTION == "start":
            if not IS_TURNING:
                if DIRECTION == 'right':
                    distance = get_object_distance('leftside')
                else:
                    distance = get_object_distance('rightside')

                rospy.loginfo('\n[PROC] distance on ' + DIRECTION + ' : ' +
                              str(distance))

                if distance == 0:
                    return
                elif distance < 0.35:
                    TURTLE.turn(DIRECTION, 1.2)
                    IS_TURNING = True
                    # TURTLE.change_line(DIRECTION, 1)
                    reverse_direction()
                    TURN_COUNT = TURN_COUNT + 1
                else:
                    TURTLE.set_speed('normal')
            else:
                if DIRECTION == 'right':
                    distance = get_object_distance('frontleft')
                else:
                    distance = get_object_distance('frontright')

                # rospy.loginfo('\n[PROC] turning distance on ' +
                # DIRECTION + ' : ' + str(distance))
                if distance > 0.34 or distance == 0:
                    reverse_direction()
                    TURN_COUNT = TURN_COUNT + 1
                    TURTLE.turn(DIRECTION, 1.2)
                    IS_TURNING = False

            if TURN_COUNT == 3:
                TURTLE.turn("left", 1.5, True)
                STATE_CONSTRUCTION = "ending"

        elif STATE_CONSTRUCTION == "ending":
            TURTLE.go_forward(1.0)
            CURRENT_STATE = "before_parking"
            print_state()
            # rospy.signal_shutdown("\n[PROC] Shutting down...")

    if CURRENT_STATE is "blocking":
        ENABLE_FRONT = False
        process_blocking()
    if CURRENT_STATE is "tunnel":
        process_tunnel2()
Example #16
def process_tunnel():
    """ process tunnel state """

    if TURTLE.is_occupied():
        return

    if STEP == 30:
        # TODO: turn to 45 degrees
        pass
    elif STEP == 31:
        TURTLE.set_speed("normal")
        # TURTLE.go_turn("right", 1, angular=2, speed=0.15)
        front = get_object_distance("front")
        left = get_object_distance("left")
        right = get_object_distance("right")
        rospy.logdebug(
            "front: {}  left: {}  right: {}".format(front, left, right))
        if left < 0.20:
            # TODO: turn to variable degrees
            TURTLE.turn(0.15, -6)
            STEP = 34
            return
        elif right < 0.20:
            # TODO: turn to variable degrees
            TURTLE.turn(-0.15, -6)
            STEP = 35
            return
        elif front == 0 or front > 0.40:
            return
        else:
            TURTLE.turn(0.15, -6)
            TURTLE.turn(0.15, -6)
    elif STEP == 32:
        TURTLE.set_speed("normal")
        frontleft = get_object_distance("frontleft")
        rospy.logdebug("frontleft: {}".format(frontleft))
        if frontleft == 0 or frontleft < 0.40:
            return
        else:
            rospy.sleep(rospy.Duration(1.8))
            TURTLE.turn(0.13, 6)
    elif STEP == 33:
        TURTLE.set_speed("normal")
        frontleft = get_object_distance("frontleft")
        rospy.logdebug("frontleft: {}".format(frontleft))
        if frontleft == 0 or frontleft < 0.40:
            return
        else:
            STEP = 31
            return
    elif STEP == 34:
        front = get_object_distance("front")
        if front == 0 or front > 0.20:
            return
        # TODO : turn to left
        TURTLE.turn(-0.3, -6)
    elif STEP == 35:
        # NOTE: ending step
        TURTLE.set_speed("normal")
    else:
        return
    STEP += 1
Example #17
#!/usr/bin/env python3

import rospy
from sensor_msgs.msg import CompressedImage, LaserScan

import processor
from constants import PATH_RASPICAM, PATH_USBCAM, PATH_LIDAR
from turtlebot import TURTLE
from scheduler import SCHEDULER

TURTLE.set_speed("fast")
TURTLE.set_speed_smooth("normal")

SCHEDULER.set_state("default")
# SCHEDULER.set_state("parking")
# SCHEDULER.set_state("construction")

rospy.Subscriber(PATH_USBCAM,
                 CompressedImage,
                 processor.process_subcam,
                 queue_size=1)
rospy.Subscriber(PATH_LIDAR, LaserScan, processor.process_lidar, queue_size=1)
rospy.Subscriber(PATH_RASPICAM,
                 CompressedImage,
                 processor.process_frontcam,
                 queue_size=1)


def process_fish_image(image):
    """ process the fisheye lens image """

    trace_line(image)
    if CURRENT_STATE == 'intersection':
        if sign_intersection == 'left':
            TURTLE.set_speed_by_percentage(0.5)
            TURTLE.set_angular(TURTLE._angular + 0.2)
        elif sign_intersection == 'right':
            TURTLE.set_speed_by_percentage(0.5)
            TURTLE.set_angular(TURTLE._angular - 0.2)
    elif CURRENT_STATE == 'stop_sign':
        if left_detected > right_detected :
            TURTLE.set_speed_by_percentage(0.5)
            TURTLE.set_angular(TURTLE._angular + 0.2)
        elif right_detected > left_detected :
            TURTLE.set_speed_by_percentage(0.5)
            TURTLE.set_angular(TURTLE._angular - 0.2)
    TURTLE.move()
Example #19
def process_subcam(image):
    """ process the subcam image """

    if not SCHEDULER.is_subcam_enable():
        return
    if SCHEDULER.is_subcam_occupied() or TURTLE.is_occupied():
        return

    info = EYE.see_sub(image)

    if SCHEDULER.debug_option["show_timer"]:
        # Check delay only if has line
        # SCHEDULER.check_time("subcam", min=0.28, stop_when_delay=info["has_line"])
        SCHEDULER.check_time("subcam", min=0.28, stop_when_delay=False)

    if info is None:
        rospy.logwarn("[PROC] No Information!")
        return

    center = info["center"]
    slope = info["slope"]

    if slope < -0.5:
        limit = 1.6
        amplitude = 1.0
    else:
        limit = 1.2
        amplitude = 0.8

    limit /= 1.9
    # amplitude /= 2

    state = SCHEDULER.get_state()
    if (EYE.get_front_state() == "straight") and (state != "zigzag"):
        if (abs(center) < 30) and slope < -0.4:
            degree = pow(abs(slope) / 1.8, 1.1) * amplitude
        elif center < 0:
            degree = pow(abs(center) / 100, 2.0) * amplitude / 2
        elif center > 0:
            degree = -pow(abs(center) / 100, 2.0) * amplitude
        else:
            degree = 0
    else:
        if slope < 0:
            degree = -pow(abs(slope) / 1.8, 2.9) * amplitude * 28.0
        else:
            degree = pow(abs(slope) / 1.0, 1.2) * amplitude * 4.0

        # degree = pow(abs(slope) / 1.8, 0.9) * amplitude
        # elif center < 0:
        #     degree = pow(abs(center) / 100, 1.9) * amplitude
        # elif center > 0:
        #     degree = - pow(abs(center) / 100, 1.9) * amplitude
        # else:
        #     degree = 0

    buf_sum = sum(BUF_ANGULAR)
    if EYE.get_front_state() == "straight":
        adjust_angular = BUF_ANGULAR.pop(0) * 0.9
        BUF_ANGULAR.append(degree)
        degree -= adjust_angular
        # if abs(buf_sum) > 1:
    else:
        reset_buffer()
        adjust_angular = 0

    degree = max(min(degree, limit), -limit)

    if not info["has_line"]:
        if center > 200:
            # degree = -1.2
            degree = -1.1  # For enhancing frequency
        # elif EYE.get_front_state() == "straight":
        #     degree = 0.6
        else:
            # degree = 1.4
            degree = 1.3  # For enhancing frequency
    # if not info["has_line"]:
    #     if center < -55:
    #         # degree = 1.6
    #         degree = 1.4  # For slow speed
    #     elif center > 50:
    #         degree = -1.2
    #         # degree = -1.2  # For slow speed
    #     # elif center > 19:
    #     #     degree = -1.2
    #     else:
    #         degree = 1.4

    if SCHEDULER.debug_option["show_center_slope"]:
        rospy.logdebug(
            "[PROC] center: {:.2f}  slope: {:.2f}  degree: {:.2f}  adj: {:.2f}  buf_sum: {:.2f}  {}  {}"
            .format(center, slope, degree, adjust_angular, buf_sum,
                    EYE.get_front_state(), info["has_line"]))

    rospy.Timer(rospy.Duration(0.15),
                SCHEDULER.release_subcam_occupied,
                oneshot=True)
    TURTLE.turn(0.13, degree)
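
process_subcam() above (and the construction handler in Example #13) rely on a module-level BUF_ANGULAR buffer, a BUF_SIZE constant, and a reset_buffer() helper that are not shown in these examples. A plausible minimal version, assuming the buffer is just a fixed-length list of recent steering corrections (the real implementation may differ):

# Sketch of the steering-history buffer assumed by the snippets above.
BUF_SIZE = 15
BUF_ANGULAR = [0.0] * BUF_SIZE

def reset_buffer():
    """Drop the accumulated corrections so old turns stop bleeding into new ones."""
    global BUF_ANGULAR
    BUF_ANGULAR = [0.0] * BUF_SIZE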
Example #20
def test_block_escaping(lidar_data):
    global STEP

    set_lidar_values(lidar_data)

    if not TURTLE.is_occupied():
        if STEP == 1:
            TURTLE.set_speed("normal")
            # TURTLE.go_turn("right", 1, angular=2, speed=0.15)
            front = get_object_distance("front")
            rospy.logdebug("front: {}".format(front))
            if front == 0 or front > 0.40:
                return
            else:
                TURTLE.turn(0.15, -6)
        elif STEP == 2:
            TURTLE.set_speed("normal")
            frontleft = get_object_distance("frontleft")
            rospy.logdebug("frontleft: {}".format(frontleft))
            if frontleft == 0 or frontleft < 0.40:
                return
            else:
                rospy.sleep(rospy.Duration(1.8))
                TURTLE.turn(0.13, 6)
        elif STEP == 3:
            TURTLE.set_speed("slow")
        else:
            return
        STEP += 1
    else:
        print("IS OCCUPIED!")
Example #21
def test_parking(image):
    global STEP

    if not TURTLE.is_occupied():
        if STEP == 1:
            TURTLE.go_forward(0.5)
        elif STEP == 2:
            TURTLE.set_speed("normal")
            TURTLE.go_turn("right", 2)
        elif STEP == 3:
            TURTLE.go_forward(0.7)
            # for stopping for a while
            rospy.sleep(rospy.Duration(0.5))
        elif STEP == 4:
            TURTLE.go_backward(1)
        elif STEP == 5:
            TURTLE.go_turn_backward(1.2)
        elif STEP == 6:
            TURTLE.go_forward(2)
        elif STEP == 7:
            TURTLE.set_speed("fast")
            TURTLE.set_speed_smooth("normal")
            # TURTLE.set_speed("normal")
        else:
            return
        STEP += 1


def process_front_image(image):
    """ process the image of raspicam """
    global CURRENT_STATE
    global MOVING_POSITION
    global SEEN_PARKING_SIGN
    global SEEN_TUNNEL_SIGN

    raw_data = np.fromstring(image.data, np.uint8)
    cv_img = cv2.imdecode(raw_data, cv2.IMREAD_COLOR)

    #### ROI SETTING ######
    blob_ROI = cv_img[100:, :]
    #######################
    if CURRENT_STATE == 'traffic_light':
        if is_light_green(cv_img):
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            CURRENT_STATE = 'intersection'
            return
        else:
            return

    if CURRENT_STATE == 'intersection':
        if is_intersection(cv_img):
            TURTLE.set_weight(0.7)
            CURRENT_STATE = 'left_or_right'
            return
        else:
            return


    if CURRENT_STATE == 'left_or_right':
        tmp_state = check_left_right_sign(blob_ROI)
        if tmp_state == 'right':
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            TURTLE.LINE_BASE = 2
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'left':
            #print("11tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            TURTLE.LINE_BASE = 1
            #print("22tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'none':
            return
    
    if CURRENT_STATE == 'inter_curving':
        global straight_cnt
        if abs(TURTLE.weight*TURTLE._angular) < 0.1:
            straight_cnt += 1
            if straight_cnt > 3 :
                straight_cnt = 0
                TURTLE.LINE_BASE = 3
                CURRENT_STATE = 'construct_recog'
                return
            else:
                return
        else:
            return

    if CURRENT_STATE == 'construct_recog':
        tmp_state = is_construction(blob_ROI)
        if tmp_state is True:
            TURTLE.LINE_BASE = 2
            CURRENT_STATE = 'construction_ready'
        else:
            return

    if CURRENT_STATE == 'construction_ready':
        global curving_cnt
        if abs(TURTLE.weight*TURTLE._angular) > 0.25 :
            curving_cnt += 1
            if curving_cnt > 5 :
                curving_cnt = 0
                CURRENT_STATE = 'construction'
                TURTLE.enable_fish = False
            else:
                return
        else:
            return

    if CURRENT_STATE == 'construction':
        # TODO: task for Ji-hyung
        pass
Example #23
def process_construction():
    """ process construction state """
    global STEP
    global NUM_OBSTRUCTION
    global LANE_TO

    if TURTLE.is_occupied():
        return

    if STEP == 0:
        leftside = get_object_distance("leftside")
        left = get_object_distance("left")
        if leftside > 0:
            rospy.logdebug("[PROC] LIDAR LEFTSIDE: {}".format(
                leftside))
        if (leftside > 0) and (leftside < 0.40):
            STEP = 1
            rospy.logdebug("[PROC] STEP changed to {}".format(STEP))

            SCHEDULER.disable_cams()
            rospy.loginfo("[PROC] construction state started.")
            TURTLE.go_forward(3.5)
            rospy.sleep(rospy.Duration(0.5))
            return
        else:
            return
        # elif (left > 0) and (left < 1.5):
        #     STEP = 2
        #     rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
        #     return

        # rospy.sleep(rospy.Duration(2))
    elif STEP == 1:
        TURTLE.set_speed("normal")
        TURTLE.set_speed_smooth("slow")
        left = get_object_distance("left")
        if left > 0:
            rospy.logdebug("[PROC] LIDAR LEFT: {}".format(
                left))
        if (left < 0.50) or (left > 1.5):
            return
        else:
            STEP = 3
            rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
            return
    elif STEP == 2:
        # TODO: write code for first left lane
        pass
    elif STEP == 3:
        TURTLE.go_turn("left", speed=0.11)
        LANE_TO = "left"
    elif STEP == 4:
        TURTLE.set_speed("normal")
        reverse_lane()
        biased = get_object_distance(LANE_TO + "_biased")
        if biased > 0:
            rospy.logdebug("[PROC] LIDAR {:s}_BIASED: {}"
                           .format(LANE_TO, biased))
        reverse_lane()

        if (biased == 0) or (biased > 0.30):
            return

        TURTLE.go_turn(LANE_TO, duration=0.5, angular=4.2)
        TURTLE.set_speed("normal")
        # TURTLE.set_speed("fast")
        # if LANE_TO is "left":
        #     TURTLE.turn(0.13, 1.3, consuming_time=1.5)
        # else:
        #     TURTLE.turn(0.13, -1.3, consuming_time=1.5)
        rospy.sleep(rospy.Duration(2.2))
        reverse_lane()

    elif STEP == 5:
        TURTLE.go_turn(LANE_TO, duration=0.7, angular=4.2)
        TURTLE.set_speed("normal")
        # TURTLE.set_speed("fast")
        # if LANE_TO is "left":
        #     TURTLE.turn(0.13, 1.3, consuming_time=1.5)
        # else:
        #     TURTLE.turn(0.13, -1.3, consuming_time=1.5)

        NUM_OBSTRUCTION += 1
        if NUM_OBSTRUCTION < 2:
            STEP = 4
            return

    elif STEP == 6:
        TURTLE.go_forward(1)
        rospy.sleep(rospy.Duration(0.6))
    elif STEP == 7:
        TURTLE.go_turn("left", duration=0.8, angular=3)
    elif STEP == 8:
        TURTLE.set_speed("fast")
        TURTLE.set_speed_smooth("normal")
        TURTLE.go_forward(5)
        rospy.sleep(rospy.Duration(0.5))
    elif STEP == 9:
        # NOTE: turn to parking step
        STEP = 10
        SCHEDULER.set_state("parking")
    else:
        return
    STEP += 1
    rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
Example #24
def process_parking():
    """ process parking state """
    global STEP

    # if TURTLE.is_occupied():
    #     return

    if STEP == 10:
        frontleft = get_object_distance("frontleft")
        frontright = get_object_distance("frontright")

        if SCHEDULER.debug_option["show_parking_lidar"]:
            rospy.logdebug("front: {:.2f}  frontleft: {:.2f}".format(
                get_object_distance("front"), get_object_distance("frontleft")
            ))

        if (frontleft > 0) and (frontleft < 0.5):
            STEP = 11
            rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
        elif (frontright > 0) and (frontright < 0.5):
            STEP = 12
            rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
        # NOTE: return is needed to prevent executing STEP += 1
        return

    elif STEP == 11:
        SCHEDULER.disable_cams()
        TURTLE.set_speed("normal")
        TURTLE.go_turn("right", 2)
        STEP = 13
        rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
        return

    elif STEP == 12:
        SCHEDULER.disable_cams()
        TURTLE.set_speed("normal")
        TURTLE.go_turn("left", 2)
        STEP = 13
        rospy.logdebug("[PROC] STEP changed to {}".format(STEP))
        return

    elif STEP == 13:
        TURTLE.go_forward(0.7)
        # for stopping for a while
        rospy.sleep(rospy.Duration(0.5))

    elif STEP == 14:
        TURTLE.go_backward(1.0)
    elif STEP == 15:
        TURTLE.go_turn_backward(1.1)
    elif STEP == 16:
        TURTLE.set_speed("normal")
        TURTLE.go_forward(1)
    elif STEP == 17:
        # TURTLE.set_speed("fast")
        # TURTLE.set_speed_smooth("normal")
        TURTLE.set_speed("normal")
        SCHEDULER.set_state("zigzag")
    else:
        return
    STEP += 1
Example #25
def process_subcam(image):
    """ process the subcam image """
    start = timeit.default_timer()

    global LINE_BASE
    global TRACKING
    global TURNING_TO
    global TEST_ANGULAR
    global TEST_ONCE

    if TRACKING is not "fish":
        return

    if not SCHEDULER.is_subcam_occupied():
        # if True:
        rospy.Timer(rospy.Duration(0.04),
                    SCHEDULER.release_subcam_occupied,
                    oneshot=True)
        info = EYE.see_sub(image)
        # rospy.Timer(rospy.Duration(0.04), SCHEDULER.release_subcam_occupied, oneshot=True, reset=True)

        if info is None:
            print("NO INFO!")
            # TURTLE.set_angular_smooth(0.12)
            # pass
        else:
            # TURTLE.set_speed('slow')
            # if info["slope"]:
            #     TURTLE.set_speed_by_percentage(-abs(info["slope"] / 6))
            # else:
            TURTLE.set_speed('normal')

            center_x, center_y, point_x, point_y = (
                value for value in info["line_center"])
            gap_x = abs(point_x - center_x)
            rospy.loginfo("\n[PROC] info: \n" + str(info))
            rospy.loginfo("\nGAP: " + str(gap_x))

            if info["slope"] > 0:
                slope = math.sqrt(0.5 * info["slope"])
            else:
                slope = -math.sqrt(-0.5 * info["slope"])

            if not info["has_line"]:
                if gap_x > 100:
                    if TEST_ONCE:
                        TURTLE.set_angular(-1.75)
                        TURTLE.set_angular_smooth(-0.1)
                    else:
                        TURTLE.set_angular_smooth(-0.1)
                elif gap_x < 0:
                    if TEST_ONCE:
                        TURTLE.set_angular(1.75)
                        TURTLE.set_angular_smooth(0.2)
                    else:
                        TURTLE.set_angular_smooth(0.2)
                TEST_ONCE = False
            # elif gap_x > 100:
            #     TURTLE.set_angular(-0.15 * info["slope"] * 4)
            #     TURTLE.set_angular_smooth(-0.1)
            else:
                TEST_ONCE = True
                if gap_x > 100:
                    TURTLE.set_angular(-0.15 + slope)
                    TURTLE.set_angular_smooth(-0.1)
                elif gap_x > 30:
                    TURTLE.set_angular(0)
                else:
                    # if slope < 0:
                    #     TURTLE.set_angular(-0.25 + slope * 1.5)
                    #     TURTLE.set_angular_smooth(-0.1)
                    # else:
                    rospy.loginfo("LOST")
                    TURTLE.set_angular(0.45 + slope)
                    TURTLE.set_angular_smooth(0.1)

        # SCHEDULER.release_subcam_occupied()
        end = timeit.default_timer()
        # print("sub l: {:d}".format(info["left"]) + "   s: {:.01f}".format(slope)
        #     + "  time: {:.02f}".format(end - start))
        print("\nTIME: {:.02f}".format(end - start))
Example #26
#!/usr/bin/env python3

import rospy
# from std_msgs.msg import String
from sensor_msgs.msg import CompressedImage, LaserScan
from geometry_msgs.msg import Twist
import viewer
import processor
from turtlebot import TURTLE
from scheduler import SCHEDULER  # ! not used in contest

from constants import PATH_RASPICAM, PATH_USBCAM, PATH_LIDAR, PATH_GALAPAGOS_STATE

TURTLE.disable()
SCHEDULER.enable_lidar()
SCHEDULER.set_state("default")
# SCHEDULER.set_state("to_intersection")
# SCHEDULER.set_state("traffic_light")

rospy.Subscriber(PATH_USBCAM,
                 CompressedImage,
                 processor.process_subcam,
                 queue_size=1)
rospy.Subscriber(PATH_RASPICAM,
                 CompressedImage,
                 processor.process_frontcam,
                 queue_size=1)
# rospy.Subscriber(PATH_LIDAR, LaserScan,
#                  processor.process_lidar, queue_size=1)
# rospy.Subscriber('/cmd_vel', Twist, viewer.view_speed, queue_size=1)
Example #27
def process_eye(image):
    """ process the Eye System """
    global TRACKING
    global TURNING_TO

    # if TRACKING is not "front":
    #     return
    # rospy.loginfo('\n[PROC] frontcam image received')
    # rospy.loginfo(EYE.is_occupied())

    if not SCHEDULER.is_frontcam_occupied():
        info = EYE.see(image)
        # rospy.loginfo("\n[PROC] info: " + str(info))
        rospy.Timer(rospy.Duration(0.1), EYE.release_occupied, oneshot=True)

        if TRACKING is not "front":
            return
        if info is None:
            pass
        else:
            center = info["center"]
            TURTLE.set_speed('normal')

            if info["state"] is "straight":
                # TURTLE.set_speed('normal')
                pass
            elif info["state"] is "lost_track":
                rospy.loginfo("\n[PROC] state: lost_track")
                TURNING_TO = info["turning_to"]
                TURTLE.go_forward(1)
                rospy.Timer(rospy.Duration(1),
                            track_fish,
                            oneshot=True,
                            reset=True)
            # elif info["state"] is "turning":
            #     TURNING_TO = info["turning_to"]
            #     track_fish()
            # rospy.Timer(rospy.Duration(5.5), track_fish, oneshot=True, reset=True)

            if center > 500:
                TURTLE.set_angular(0.27)
                rospy.loginfo("\n[PROC] [!] turning to right at " +
                              str(center))
                return
            if center < -500:
                TURTLE.set_angular(-0.27)
                rospy.loginfo("\n[PROC] [!] turning to left at " + str(center))
                return
            if center > 60:
                TURTLE.set_angular(0.12)
                # TURTLE.set_angular(0.08)
                rospy.loginfo("\n[PROC] turning to right at " + str(center))
                return
            elif center < -60:
                TURTLE.set_angular(-0.12)
                # TURTLE.set_angular(-0.08)
                rospy.loginfo("\n[PROC] turning to left at " + str(center))
                return
        TURTLE.set_angular_smooth(0)
    return
Example #28
import sys

import rospy
from std_msgs.msg import String

from constants import PATH_GALAPAGOS_STATE
from turtlebot import TURTLE

if __name__ == '__main__':
    try:
        RUN_TYPE = 'run_' + sys.argv[1]

        # NOTE: publisher to stop viewers
        PUBLISHER_PACKAGE = rospy.Publisher(PATH_GALAPAGOS_STATE, String, queue_size=5)
        MSG = String()
        MSG.data = RUN_TYPE
        PUBLISHER_PACKAGE.publish(MSG)

        rospy.loginfo("executing: %s" % RUN_TYPE)
        __import__('%s' % (RUN_TYPE), fromlist=[RUN_TYPE])
        rospy.loginfo("Galapagos package started.")
        
        while not rospy.is_shutdown():
            TURTLE.move()
            rospy.Rate(5).sleep()
        
        #rospy.spin() # ! Deprecated
    # NOTE: the code below would never be executed
    # except rospy.ROSInterruptException:
    #     rospy.loginfo('exception occurred')
    #     rospy.signal_shutdown("bye")
    #     pass
    except KeyboardInterrupt:
        print("\r", end='')

    TURTLE.disable()
    MSG.data = "view"
    PUBLISHER_PACKAGE.publish(MSG)
    
Example #29
def process_frontcam(image):
    """ process the image of raspicam """
    global ENABLE_FRONT
    global CURRENT_STATE

    if not ENABLE_FRONT:
        return

    raw_data = np.fromstring(image.data, np.uint8)
    cv_img = cv2.imdecode(raw_data, cv2.IMREAD_COLOR)
    # NOTE: ROI Setting
    blob_ROI = cv_img[100:, :]

    if CURRENT_STATE == 'traffic_light':
        if is_light_green(cv_img):
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'left_or_right'
            print_state()
            #TURTLE.set_weight(0.8)
            return
        else:
            print("no green")
            """"
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'left_or_right'
            print_state()
            #TURTLE.set_weight(0.8)
            """
            return
    '''
    if CURRENT_STATE == 'intersection':
        cv2.imshow("blob_ROI",blob_ROI)
        # cv2.waitKey(1)
        print("intersection state")
        if is_intersection(cv_img):
            TURTLE.set_weight(0.8)
            CURRENT_STATE = 'left_or_right'
            print_state()
            return
        else:
            return
    '''

    if CURRENT_STATE == 'left_or_right':
        print("left or right state")
        cv2.imshow("blob_ROI", blob_ROI)
        # cv2.waitKey(1)
        tmp_state = check_left_right_sign(blob_ROI)
        print("tmp state: ", tmp_state)
        if tmp_state == 'right':
            #print("11tmp state: ",tmp_state, ", ri ght cnt: ",inter_right_cnt)
            TURTLE.LINE_BASE = 2
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            CURRENT_STATE = 'inter_curving'
            print_state()
            TURTLE.set_weight(1.0)
        elif tmp_state == 'left':
            #print("11tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            TURTLE.LINE_BASE = 1
            #print("22tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            CURRENT_STATE = 'inter_curving'
            print_state()
            TURTLE.set_weight(1.0)
        elif tmp_state == 'none':
            return

    if CURRENT_STATE == 'inter_curving':
        global COUNT_STRAIGHT
        global ENABLE_LIDAR
        if abs(TURTLE.weight * TURTLE._angular) < 0.1:
            COUNT_STRAIGHT += 1
            print("straight counting : ", COUNT_STRAIGHT, " is counted")
            if COUNT_STRAIGHT > 25:
                COUNT_STRAIGHT = 0
                TURTLE.LINE_BASE = 2
                ENABLE_LIDAR = True
                CURRENT_STATE = "construction"
                print_state()
                return
            else:
                return
        else:
            COUNT_STRAIGHT = 0
            return

    if CURRENT_STATE is "before_parking":
        TURTLE.LINE_BASE = 1
        rospy.Timer(rospy.Duration(10), set_state_to_parking, oneshot=True)
        return
Example #30
def process_subcam(image):
    """ process the subcam image """

    if not SCHEDULER.is_subcam_enable():
        return
    if EYE.is_sub_occupied() or TURTLE.is_occupied():
        return

    if SCHEDULER.debug_option["show_timer"]:
        SCHEDULER.check_time("subcam", min=0.25)

    info = EYE.see_sub(image)

    if info is None:
        # rospy.logwarn("[PROC] No Information!")
        return

    center = info["center"]
    slope = info["slope"]

    if slope < -0.5:
        # limit = 1.6
        limit = 1.5  # For slow speed
        amplitude = 1.6
    # elif slope > 0.5:
    #     limit = 1.6
    #     amplitude = 1.2
    else:
        # limit = 1.2
        # amplitude = 1.2
        limit = 1.2  # For slow speed
        amplitude = 1.2  # For slow speed

    if EYE.get_front_state() == "straight":
        if (abs(center) < 30) and slope < -0.4:
            degree = pow(abs(slope) / 1.8, 1.1) * amplitude
        elif center < 0:
            degree = pow(abs(center) / 100, 2.0) * amplitude
        elif center > 0:
            degree = - pow(abs(center) / 100, 2.0) * amplitude
        else:
            degree = 0
    else:
        if (abs(center) < 30) and slope < -0.4:
            degree = pow(abs(slope) / 1.8, 0.9) * amplitude
        elif center < 0:
            degree = pow(abs(center) / 100, 1.9) * amplitude
        elif center > 0:
            degree = - pow(abs(center) / 100, 1.9) * amplitude
        else:
            degree = 0

    buf_sum = sum(BUF_ANGULAR)
    if EYE.get_front_state() == "straight":
        adjust_angular = BUF_ANGULAR.pop(0) * 0.9
        BUF_ANGULAR.append(degree)
        degree -= adjust_angular
        # if abs(buf_sum) > 1:
    else:
        reset_buffer()
        adjust_angular = 0

    degree = max(min(degree, limit), -limit)

    if not info["has_line"]:
        if center < -55:
            # degree = 1.6
            degree = 1.4  # For slow speed
        elif center > 50:
            degree = -1.2
            # degree = -1.2  # For slow speed
        elif center > 10:  # editing...
            degree = -0.9  # editing...
        else:
            degree = 1.4

    # if SCHEDULER.debug_option["show_center_slope"]:
    #     rospy.logdebug(
    #         "[PROC] center: {:.2f}  slope: {:.2f}  degree: {:.2f}  adj: {:.2f}  buf_sum: {:.2f}  {}  {}".format(
    #             center, slope, degree, adjust_angular, buf_sum, EYE.get_front_state(), info["has_line"])
    #     )

    rospy.Timer(
        rospy.Duration(0.15), EYE.release_sub_occupied, oneshot=True
    )
    TURTLE.turn(0.13, degree)