Example #1
def initialize():
    """ initialize processing """
    EYE.calibrate()
    TURTLE.set_speed('normal')
    if CURRENT_STATE == "traffic_light":
        TURTLE.disable()
    else:
        TURTLE.enable()
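TURTLE and EYE are project-specific singletons that none of the examples on this page define. A hypothetical minimal stand-in for TURTLE, inferred only from the calls made in these examples, might look like the sketch below; the real class in the source project will differ.

# Hypothetical stub inferred from the TURTLE calls in these examples;
# not the original robot wrapper.
class Turtle(object):
    def __init__(self):
        self.weight = 1.0
        self._angular = 0.0
        self.LINE_BASE = 1
        self.enable_fish = True
        self._enabled = False
        self._speed = 'normal'

    def enable(self):
        self._enabled = True

    def disable(self):
        self._enabled = False

    def set_speed(self, speed):
        self._speed = speed

    def set_speed_smooth(self, speed):
        # the real implementation presumably ramps toward the target speed
        self._speed = speed

    def set_weight(self, weight):
        self.weight = weight

    def boost(self):
        self._speed = 'fast'


TURTLE = Turtle()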
Example #2
def process_frontcam(image):
    """ process the frontcam image """
    if not SCHEDULER.is_frontcam_enable():
        return

    STATE = SCHEDULER.get_state()
    info = EYE.see_front(image)

    if SCHEDULER.debug_option["show_front_info"]:
        rospy.logdebug(info)

    if STATE == "default":
        # if EYE.is_boostable(image):
        process_acceleration(info)
        # signal = is_construction(image)
        # rospy.logdebug(signal)

    if STATE == "traffic_light":
        if is_light_green(image):
            TURTLE.enable()
            SCHEDULER.set_state("to_intersection")
        return

    if STATE == "to_intersection":
        signal = check_left_right_sign(image)
        if signal == "right":
            SCHEDULER.set_state("intersection_right")
        elif signal == "left":
            SCHEDULER.set_state("intersection_left")
        return

    if STATE == "intersection_right":
        # TODO: make algorithms for right
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    if STATE == "intersection_left":
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    if STATE == "to_construction":
        if EYE.is_boostable(image):
            TURTLE.boost()

        if is_construction(image):
            SCHEDULER.set_state("construction_searching")

    if STATE == "construction_searching":
        pass
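SCHEDULER is likewise never defined here. A hypothetical minimal stub, inferred from the SCHEDULER calls used in Examples #2, #3 and #6, is sketched below; the real scheduler in the source project differs.

import time

# Hypothetical stub inferred from the SCHEDULER calls in these examples.
class Scheduler(object):
    def __init__(self):
        self.debug_option = {"show_front_info": False,
                             "show_center_slope": False}
        self._state = "traffic_light"
        self._frontcam_enabled = True
        self._timestamps = {}

    def is_frontcam_enable(self):
        return self._frontcam_enabled

    def get_state(self):
        return self._state

    def set_state(self, state):
        self._state = state

    def check_time(self, name, min=0.5):
        # warn if more than `min` seconds passed since the previous call
        now = time.time()
        last = self._timestamps.get(name, now)
        self._timestamps[name] = now
        if now - last > min:
            print("[{}] {:.2f}s since last frame".format(name, now - last))


SCHEDULER = Scheduler()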
Example #3
def test_frontcam(image):
    """ process the frontcam image """
    # print("image is subscribed")
    if not SCHEDULER.is_frontcam_enable():
        return

    if not EYE.is_front_occupied():

        STATE = SCHEDULER.get_state()

        if STATE == "traffic_light":
            signal = is_light_green(image)
            rospy.logdebug("[PROC] is_light_green: {}"
                           .format(signal))
            if signal:
                SCHEDULER.set_state("to_intersection")
                TURTLE.enable()
                TURTLE.set_speed("fast")
                TURTLE.set_speed_smooth("normal")

        elif STATE == "to_intersection":
            signal = check_left_right_sign(image)
            rospy.logdebug("[PROC] left or right: {}"
                           .format(signal))
            if signal == "right":
                SCHEDULER.set_state("intersection_right")
            elif signal == "left":
                SCHEDULER.set_state("intersection_left")
        if SCHEDULER.debug_option["show_center_slope"]:
            SCHEDULER.check_time("frontcam", min=0.5)
        info = EYE.see_front(image)
        if info is None:
            return

        # rospy.logdebug("[PROC] test_frontcam: state {}  {}".format(
        #     info["state"], info["horizon_position"]))
        # if (info["horizon_position"] < 150):
        #     TURTLE.set_speed("fast")
        #     TURTLE.set_speed_smooth("normal")

        if (info["state"] == "turning") and (info["turning_to"] != "None"):
            rospy.logdebug(
                "[PROC] turn off for 1.5s")
            rospy.Timer(rospy.Duration(1.5),
                        EYE.release_front_occupied, oneshot=True)
        else:
            rospy.Timer(rospy.Duration(0.1),
                        EYE.release_front_occupied, oneshot=True)
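Example #3 throttles itself with rospy.Timer(..., oneshot=True): the front camera is marked busy and a one-shot timer releases it after a fixed delay. The same pattern in isolation (node name and log text are illustrative only):

import rospy

def release_front_occupied(event):
    # event is a rospy.timer.TimerEvent; only the fact that the delay
    # elapsed matters for this pattern
    rospy.loginfo("front camera processing released")

if __name__ == '__main__':
    rospy.init_node('oneshot_timer_demo')
    # fires exactly once, 1.5 s from now, then the timer is discarded
    rospy.Timer(rospy.Duration(1.5), release_front_occupied, oneshot=True)
    rospy.spin()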
Example #4
def process_frontcam(image):
    """ process the image of raspicam """
    global ENABLE_FRONT
    global CURRENT_STATE

    if not ENABLE_FRONT:
        return

    raw_data = np.frombuffer(image.data, np.uint8)
    cv_img = cv2.imdecode(raw_data, cv2.IMREAD_COLOR)
    # NOTE: ROI Setting
    blob_ROI = cv_img[100:, :]

    if CURRENT_STATE == 'traffic_light':
        if is_light_green(cv_img):
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'left_or_right'
            print_state()
            #TURTLE.set_weight(0.8)
            return
        else:
            print("no green")
            """"
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'left_or_right'
            print_state()
            #TURTLE.set_weight(0.8)
            """
            return
    '''
    if CURRENT_STATE == 'intersection':
        cv2.imshow("blob_ROI",blob_ROI)
        # cv2.waitKey(1)
        print("intersection state")
        if is_intersection(cv_img):
            TURTLE.set_weight(0.8)
            CURRENT_STATE = 'left_or_right'
            print_state()
            return
        else:
            return
    '''

    if CURRENT_STATE == 'left_or_right':
        print("left or right state")
        cv2.imshow("blob_ROI", blob_ROI)
        # cv2.waitKey(1)
        tmp_state = check_left_right_sign(blob_ROI)
        print("tmp state: ", tmp_state)
        if tmp_state == 'right':
            #print("11tmp state: ",tmp_state, ", ri ght cnt: ",inter_right_cnt)
            TURTLE.LINE_BASE = 2
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            CURRENT_STATE = 'inter_curving'
            print_state()
            TURTLE.set_weight(1.0)
        elif tmp_state == 'left':
            #print("11tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            TURTLE.LINE_BASE = 1
            #print("22tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            CURRENT_STATE = 'inter_curving'
            print_state()
            TURTLE.set_weight(1.0)
        elif tmp_state == 'none':
            return

    if CURRENT_STATE == 'inter_curving':
        global COUNT_STRAIGHT
        global ENABLE_LIDAR
        if abs(TURTLE.weight * TURTLE._angular) < 0.1:
            COUNT_STRAIGHT += 1
            print("straight counting : ", COUNT_STRAIGHT, " is counted")
            if COUNT_STRAIGHT > 25:
                COUNT_STRAIGHT = 0
                TURTLE.LINE_BASE = 2
                ENABLE_LIDAR = True
                CURRENT_STATE = "construction"
                print_state()
                return
            else:
                return
        else:
            COUNT_STRAIGHT = 0
            return

    if CURRENT_STATE == "before_parking":
        TURTLE.LINE_BASE = 1
        rospy.Timer(rospy.Duration(10), set_state_to_parking, oneshot=True)
        return
Example #5
def process_front_image(image):
    """ process the image of raspicam """
    global CURRENT_STATE
    global MOVING_POSITION
    global SEEN_PARKING_SIGN
    global SEEN_TUNNEL_SIGN

    raw_data = np.frombuffer(image.data, np.uint8)
    cv_img = cv2.imdecode(raw_data, cv2.IMREAD_COLOR)

    #### ROI SETTING ######
    blob_ROI = cv_img[100:, :]
    #######################
    if CURRENT_STATE == 'traffic_light':
        if is_light_green(cv_img):
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            CURRENT_STATE = 'intersection'
            return
        else:
            return

    if CURRENT_STATE == 'intersection':
        if is_intersection(cv_img):
            TURTLE.set_weight(0.7)
            CURRENT_STATE = 'left_or_right'
            return
        else:
            return


    if CURRENT_STATE == 'left_or_right':
        tmp_state = check_left_right_sign(blob_ROI)
        if tmp_state == 'right':
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            TURTLE.LINE_BASE = 2
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'left':
            #print("11tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            TURTLE.LINE_BASE = 1
            #print("22tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'none':
            return
    
    if CURRENT_STATE == 'inter_curving':
        global straight_cnt
        if abs(TURTLE.weight * TURTLE._angular) < 0.1:
            straight_cnt += 1
            if straight_cnt > 3:
                straight_cnt = 0
                TURTLE.LINE_BASE = 3
                CURRENT_STATE = 'construct_recog'
                return
            else:
                return
        else:
            return

    if CURRENT_STATE == 'construct_recog':
        tmp_state = is_construction(blob_ROI)
        if tmp_state is True:
            TURTLE.LINE_BASE = 2
            CURRENT_STATE = 'construction_ready'
        else:
            return

    if CURRENT_STATE == 'construction_ready':
        global curving_cnt
        if abs(TURTLE.weight * TURTLE._angular) > 0.25:
            curving_cnt += 1
            if curving_cnt > 5:
                curving_cnt = 0
                CURRENT_STATE = 'construction'
                TURTLE.enable_fish = False
            else:
                return
        else:
            return

    if CURRENT_STATE == 'construction':
        '''
        task for Ji-hyung
        '''
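Examples #4, #5 and #7 decode the raw bytes of a sensor_msgs/CompressedImage by hand. A sketch of how such a callback is typically subscribed, assuming a compressed raspicam topic (the topic name is an assumption, not taken from the original project):

import rospy
import cv2
import numpy as np
from sensor_msgs.msg import CompressedImage

def frontcam_callback(msg):
    # same decoding step as process_frontcam / process_front_image above,
    # using np.frombuffer instead of the deprecated np.fromstring
    raw = np.frombuffer(msg.data, np.uint8)
    cv_img = cv2.imdecode(raw, cv2.IMREAD_COLOR)
    rospy.logdebug("frame received: %dx%d", cv_img.shape[1], cv_img.shape[0])

if __name__ == '__main__':
    rospy.init_node('frontcam_listener')
    # assumed topic name; adjust to the camera topic used in the project
    rospy.Subscriber('/raspicam_node/image/compressed', CompressedImage,
                     frontcam_callback, queue_size=1)
    rospy.spin()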
Example #6
def process_frontcam(image):
    """ process the frontcam image """
    if not SCHEDULER.is_frontcam_enable():
        return

    state = SCHEDULER.get_state()
    info = EYE.see_front(image)

    if SCHEDULER.debug_option["show_front_info"]:
        rospy.logdebug(info)

    if state == "default":
        # if EYE.is_boostable(image):
        process_acceleration(info)
        # SCHEDULER.set_state("to_intersection")
        # signal = is_construction(image)
        # rospy.logdebug(signal)
        # if is_construction(image):
        #     TURTLE.boost()
        #     SCHEDULER.set_state("construction")

    if state == "traffic_light":
        if is_light_green(image):
            TURTLE.enable()
            SCHEDULER.set_state("to_intersection")
        return

    # NOTE: temporary settings:
    if state == "to_intersection":
        # rospy.Timer(rospy.Duration(35), SCHEDULER.enable_lidar, oneshot=True)
        SCHEDULER.set_state("intersection_left")

    # if state == "to_intersection":
    #     signal = check_left_right_sign(image)
    #     if signal == "right":
    #         SCHEDULER.set_state("intersection_right")
    #     elif signal == "left":
    #         SCHEDULER.set_state("intersection_left")
    #     return

    if state == "intersection_right":
        # TODO: make algorithms for right
        if EYE.is_boostable(image):
            TURTLE.boost()
            SCHEDULER.set_state("to_construction")
        return

    # NOTE: temporary settings:
    if state == "intersection_left":
        # if EYE.is_boostable(image):
        #     TURTLE.boost()
        #     SCHEDULER.set_state("to_construction")
        return

    if state == "to_construction":
        # if EYE.is_boostable(image):
        # TURTLE.boost()
        EYE.check_yellow = True
        # if is_construction(image):
        # SCHEDULER.set_state("construction_searching")

    if state == "construction_searching":
        pass
Example #7
def process_frontcam(image):
    """ process the image of raspicam """
    global CURRENT_STATE
    global MOVING_POSITION
    global SEEN_PARKING_SIGN
    global SEEN_TUNNEL_SIGN
    global LIDAR_FLAG

    raw_data = np.frombuffer(image.data, np.uint8)
    cv_img = cv2.imdecode(raw_data, cv2.IMREAD_COLOR)

    #### ROI SETTING ######
    blob_ROI = cv_img[100:, :]
    #######################
    if CURRENT_STATE == 'traffic_light':
        if is_light_green(cv_img):
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'intersection'
            #TURTLE.set_weight(0.8)
            return
        else:
            print("no green")
            TURTLE.enable()
            #TURTLE.set_speed('fast')
            print("detected green")
            CURRENT_STATE = 'intersection'
            #TURTLE.set_weight(0.8)
            return

    if CURRENT_STATE == 'intersection':
        cv2.imshow("blob_ROI", blob_ROI)
        # cv2.waitKey(1)
        print("intersection state")
        if is_intersection(cv_img):
            TURTLE.set_weight(0.8)
            CURRENT_STATE = 'left_or_right'
            print("intersection detected!!")
            return
        else:
            return

    if CURRENT_STATE == 'left_or_right':
        print("left or right state")
        cv2.imshow("blob_ROI", blob_ROI)
        # cv2.waitKey(1)
        tmp_state = check_left_right_sign(blob_ROI)
        print("tmp state: ", tmp_state)
        if tmp_state == 'right':
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            TURTLE.LINE_BASE = 2
            #print("11tmp state: ",tmp_state, ", right cnt: ",inter_right_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'left':
            #print("11tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            TURTLE.LINE_BASE = 1
            #print("22tmp state: ",tmp_state, ", left cnt: ",inter_left_cnt)
            CURRENT_STATE = 'inter_curving'
            TURTLE.set_weight(1.0)
        elif tmp_state == 'none':
            return

    if CURRENT_STATE == 'inter_curving':
        print("#################################################")
        print("########### inter_curving state #################")
        print("#################################################")
        global straight_cnt
        if abs(TURTLE.weight * TURTLE._angular) < 0.1:
            straight_cnt += 1
            print("straight counting : ", straight_cnt, " is counted")
            if straight_cnt > 5:
                straight_cnt = 0
                TURTLE.LINE_BASE = 2
                CURRENT_STATE = 'construction'
                return
            else:
                return
        else:
            straight_cnt = 0
            return

    if CURRENT_STATE == 'construct_recog':
        tmp_state = is_construction(blob_ROI)
        print(tmp_state)
        if tmp_state is True:
            TURTLE.LINE_BASE = 2
            CURRENT_STATE = 'construction'
            #LIDAR_FLAG = True
        else:
            return

    if CURRENT_STATE == 'construction':
        return

    # if CURRENT_STATE == 'construction':
    #     '''
    #     task for Ji-hyung
    #     '''

    #     TURTLE.LINE_BASE = 1
    #     CURRENT_STATE = 'parking'
    #     pass
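Several of the states above (inter_curving, construction_ready) debounce a noisy condition by counting how many consecutive camera frames satisfy it before switching state. The same idea extracted into a small helper (illustrative only, not part of the original code):

# Illustrative debounce helper; names are made up, not from the source project.
class FrameCounter(object):
    def __init__(self, threshold):
        self.threshold = threshold
        self.count = 0

    def update(self, condition):
        """Return True once `condition` held for `threshold` consecutive frames."""
        if condition:
            self.count += 1
            if self.count > self.threshold:
                self.count = 0
                return True
        else:
            self.count = 0
        return False


# usage inside a camera callback, mirroring the inter_curving branch:
# straightened = FrameCounter(threshold=25)
# if straightened.update(abs(TURTLE.weight * TURTLE._angular) < 0.1):
#     CURRENT_STATE = "construction"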