Example #1
    def __init__(self, hz=10):
        self.rate = rospy.Rate(hz)
        self.pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)
        self.msg = xycar_motor()

        self.timer = Timer()
        self.sensor = XycarSensor()
        yaw0 = self.sensor.init(self.rate)

        self.obstacle_detector = ObstacleDetector(self.timer)
        self.lane_detector = LaneDetector()
        self.stopline_detector = StopLineDetector()

        self.mode_controller = ModeController(yaw0, self.timer)
        self.stanley_controller = StanleyController(self.timer)
        self.ar_controller = ARController()

        steer_img = cv2.imread(rospy.get_param("steer_img"))
        self.steer_img = cv2.resize(steer_img, dsize=(0,0), fx=0.2, fy=0.2)

        self.target_lane = 'middle'
        self.control_dict = {
            'long straight' : self.stanley,
            'short straight': self.stanley,
            'obstacle': self.obstacle,
            'stopline': self.stopline,
            'curve': self.stanley,
            'findparking': self.findparking,
            'parallelparking' : self.parallelpark,
            'arparking': self.arparking,
            'poweroff' : self.poweroff
        }
Example #2
def drive(Angle, Speed):
    print(Angle, Speed)
    global pub
    msg = xycar_motor()
    msg.angle = Angle
    msg.speed = Speed
    pub.publish(msg)
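The drive() helper above assumes a module-level publisher named pub. Below is a minimal setup sketch in the style of the later examples; the node name and the fixed angle/speed values are illustrative assumptions, not part of the original snippet.

#!/usr/bin/env python
import rospy
from xycar_motor.msg import xycar_motor

rospy.init_node('driver')  # node name chosen for illustration
pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)

rate = rospy.Rate(10)
while not rospy.is_shutdown():
    drive(0, 5)  # drive() as defined above: steer straight at a low speed
    rate.sleep()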
Example #3
def auto_drive(steer):
    global pub
    global car_run_speed

    x_m = xycar_motor()
    x_m.angle = steer
    x_m.speed = car_run_speed
    pub.publish(x_m)
Example #4
def auto_drive(pid, speed):
    global pub
    #global car_run_speed

    msg = xycar_motor()
    msg.angle = pid
    msg.speed = speed
    pub.publish(msg)
Example #5
def drive(Angle, Speed):
    global pub

    msg = xycar_motor()
    msg.angle = Angle
    msg.speed = Speed  #receive Angle, Speed

    pub.publish(msg)  #publish xycar_motor topic to motor node
Example #6
def drive(angle):
    global pub
    motor_msg = xycar_motor()
    motor_msg.header = Header()
    motor_msg.header.stamp = rospy.Time.now()
    motor_msg.speed = 10
    motor_msg.angle = -angle * 0.4
    pub.publish(motor_msg)
Example #7
def auto_drive(steer, speed):
    global pub
    #motor global??
    msg = xycar_motor()
    msg.angle = steer
    #msg.angle = 0.34
    msg.speed = speed
    #msg.speed = 0
    pub.publish(msg)
Example #8
def motor_pub(angle, speed):
    # global pub
    motor_control = xycar_motor()
    motor_control.header = Header()
    motor_control.header.stamp = rospy.Time.now()
    motor_control.angle = angle
    motor_control.speed = speed

    pub.publish(motor_control)
Example #9
def auto_drive(pid):
    global pub
    global car_run_speed

    msg = xycar_motor()
    msg.angle = pid
    #msg.angle = 0.34
    msg.speed = car_run_speed
    #msg.speed = 0
    pub.publish(msg)
Example #10
    def __init__(self, mode='start', hz=10):

        self.rate = rospy.Rate(hz)
        self.mode = mode

        # ultrasonic
        self.ultrasonic = None
        self.filter = Filter()
        self.sub_ultrasonic = rospy.Subscriber('xycar_ultrasonic',
                                               Int32MultiArray,
                                               self.callback_ultrasonic)

        # image
        self.img = None
        self.bridge = CvBridge()
        self.sub_img = rospy.Subscriber("/usb_cam/image_raw", Image,
                                        self.callback_img)

        # qrcode
        self.qr_dict = {
            '1': 'ultrasonic',
            '2': 'turn',
            '3': 'dqn',
            '4': 'bottle',
            '5': 'yolo',
            '6': 'pottedplant',
            '7': 'command1',
            '8': 'command2',
            '9': 'park'
        }

        # motor
        self.pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)
        self.msg = xycar_motor()

        # controllers
        self.controller_ultrasonic = Ultrasonic()
        self.controller_DQN = DQN()
        self.controller_yolo = Yolo('bottle')
        self.controller_ar = AR()
        self.control_dict = {
            'start': self.start,
            'ultrasonic': self.ultrasonic_control,
            'turn': self.turn_control,
            'dqn': self.dqn_control,
            'yolo': self.yolo_control,
            'park': self.ar_control
        }
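A minimal dispatch-loop sketch for a controller built like this one; the control() method below is not part of the snippet and only illustrates, as an assumption, how control_dict, self.mode, and self.rate would typically be consumed.

    def control(self):
        # hypothetical run loop: call the handler registered for the current
        # mode (each handler is expected to publish via self.pub), then sleep
        while not rospy.is_shutdown():
            self.control_dict[self.mode]()
            self.rate.sleep()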
Example #11
def main():
    global pub, motor_control
    global distance

    motor_control = xycar_motor()

    rospy.init_node('ar_drive_info')
    rospy.Subscriber('ar_pose_marker', AlvarMarkers, callback, queue_size=1)
    pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)

    while not rospy.is_shutdown():
        distance = math.sqrt(
            math.pow(arData["DX"], 2) + math.pow(arData["DZ"], 2))
        (roll, pitch, yaw) = euler_from_quaternion(
            (arData["AX"], arData["AY"], arData["AZ"], arData["AW"]))

        #print("distance: {}".format(distance))
        #print("dx: {}, dy: {}, yaw: {}".format(arData["DX"], arData["DZ"], pitch))
        park_start(distance, arData["DX"], arData["DZ"], pitch, arData["bool"])
Example #12
    motor_pub.publish(motor_msg)


if __name__ == '__main__':
    basic_speed = 14
    basic_angle = 0

    angle = basic_angle
    speed = basic_speed

    hidden_layer = [256, 256]

    study_init(6, hidden_layer, 0.001, "DQN")
    view_epi = 203
    dqn_package_path = rospkg.RosPack().get_path('dqn')
    os.chdir(dqn_package_path)

    study_model_load(view_epi)
    #time.sleep(0.5)

    rospy.init_node("dqn")
    motor_msg = xycar_motor()
    rospy.Subscriber("/xycar_ultrasonic",
                     Int32MultiArray,
                     ultrasonic_callback,
                     queue_size=1)
    motor_pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)

    rospy.spin()
Example #13
def main():
    global pub, motor_control
    global frame, bridge, height, width, mid_x
    global arData, obs_dist
    global linecount, obstacle, tmp, stop
    global Kp_std, Kp_std_s, k_std, Kp_obs, k_obs, delta_obs, vel_obs, Kp_park, k_park, k_std_obs
    global std_low_th, std_high_th, s_low_th, s_high_th
    global lap, b_lap, stop_min, stop_max
    global lidar_dist_init, lidar_dist_obs

    bridge = CvBridge()
    motor_control = xycar_motor()

    rospy.init_node('drive')
    rospy.Subscriber("/usb_cam/image_raw", Image, img_callback, queue_size=1)
    rospy.Subscriber('ar_pose_marker', AlvarMarkers, callback, queue_size=1)
    rospy.Subscriber("/scan", LaserScan, lidar_callback, queue_size=1)
    rospy.Subscriber('xycar_ultrasonic',
                     Int32MultiArray,
                     ultra_callback,
                     queue_size=1)
    pub = rospy.Publisher('/xycar_motor', xycar_motor, queue_size=1)

    # out = cv2.VideoWriter(video_name, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 10, (640, 480))

    start_time = time.time()
    stop_time = time.time()
    while not rospy.is_shutdown():
        if frame.size != (640 * 480 * 3):
            continue
        height, width = frame.shape[0:2]
        mid_x = width // 2

        # ========== Image Processing ==========
        if tmp == False:
            canny = Driving.canny_edge(frame, s_low_th, s_high_th)
        elif tmp == True:
            canny = Driving.canny_edge(frame, std_low_th, std_high_th)
        roi = Driving.set_roi(canny)
        warp_img, M, Minv, area = Driving.perspective_img(roi)
        hough = cv2.HoughLinesP(warp_img,
                                1,
                                np.pi / 180,
                                90,
                                np.array([]),
                                minLineLength=50,
                                maxLineGap=20)
        if hough is not None:
            lines, direction = Driving.calculate_lines(warp_img, hough)
        # print("lap: {}".format(lap))
        if b_lap > 3:
            print("----- Find Parking Lot -----")
            distance = math.sqrt(
                math.pow(arData["DX"], 2) + math.pow(arData["DZ"], 2))
            # print("distance: {}", distance)
            (roll, pitch, yaw) = euler_from_quaternion(
                (arData["AX"], arData["AY"], arData["AZ"], arData["AW"]))
            if 0.6 < distance <= 1.7:
                steer = math.atan2(arData["DX"], arData["DZ"])
                motor_control.angle = math.degrees(steer) * 1.5
                motor_control.speed = parking_vel
            elif 0.1 < distance <= 0.6:
                ar_parking(distance, arData["DX"], arData["DZ"], pitch)
            else:
                delta, v = Driving.lane_keeping(lines, direction, Kp_park,
                                                k_park)
                motor_control.angle = delta
                motor_control.speed = 30
            print("Distance: {}".format(distance))
            pub.publish(motor_control)
        else:
            # ========== crosswalk stop ==========
            stop, image, cnt_nzero = stopline_canny.detect_stopline(
                frame, stop_min, stop_max, stop_time)
            if stop == True:
                print("----- Crosswalk stop -----")
                tmp_time = time.time()
                while time.time() - tmp_time <= 5:
                    motor_control.angle = 7
                    motor_control.speed = 0
                    pub.publish(motor_control)
                stop_time = time.time()
                tmp = False
                obstacle = False
                lap += 1

            # ========== lidar avoid drive ==========
            if obstacle == False:
                [lidar_obs_count, lidar_count_l, lidar_count_r
                 ] = Avoiding.obstacle_cnt(obs_dist, lidar_dist_init, 40)
                # [lidar_obs_count, lidar_count_l, lidar_count_r] = Avoiding.obstacle_cnt(obs_dist, lidar_dist_init, 20)
            elif obstacle == True:
                [lidar_obs_count, lidar_count_l, lidar_count_r
                 ] = Avoiding.obstacle_cnt(obs_dist, lidar_dist_obs, 55)
            if lidar_count_l > lidar_count_r + 5 and tmp == True:
                motor_control.angle = delta_obs
                motor_control.speed = vel_obs
                obstacle = True
            elif lidar_count_r > lidar_count_l + 5 and tmp == True:
                motor_control.angle = -delta_obs
                motor_control.speed = vel_obs
                obstacle = True
            else:
                # ========== standard drive ==========
                if obstacle == False:
                    if tmp == True:
                        delta, v = Driving.lane_keeping(
                            lines, direction, Kp_std, k_std_obs)
                    elif tmp == False:
                        delta, v = Driving.lane_keeping(
                            lines, direction, Kp_std, k_std)
                # ========== lidar avoid drive ==========
                elif obstacle == True and tmp == True:
                    delta, _ = Driving.lane_keeping(lines, direction, Kp_obs,
                                                    k_obs)
                    v = vel_obs
                motor_control.angle = delta
                motor_control.speed = v
            pub.publish(motor_control)
Example #14
#!/usr/bin/env python

import rospy
import time
from xycar_motor.msg import xycar_motor

motor_control = xycar_motor()

rospy.init_node('auto_driver')

pub = rospy.Publisher('xycar_motor', xycar_motor, queue_size=1)

def motor_pub(angle, speed):
    # global pub
    global motor_control

    motor_control.angle = angle
    motor_control.speed = speed

    pub.publish(motor_control)

speed = 3
turn_angle = 30
straight_angle = 0
rate = rospy.Rate(50)

while not rospy.is_shutdown():

    # turn left or right
    for _ in range(0,700):
        motor_pub(turn_angle,speed)
Example #15
class ROS:

    bridge = CvBridge()
    motor_msg = xycar_motor()
    ultrasonic_data = {
        "FL": 0,
        "FM": 0,
        "FR": 0,
        "L": 0,
        "BL": 0,
        "BM": 0,
        "BR": 0,
        "R": 0
    }
    imu_data = {"x": 0.0, "y": 0.0, "z": 0.0, "w": 0.0}
    cam_image = np.empty(shape=[0])
    darknet_detect_cnt = 0
    darknet_image = np.empty(shape=[0])
    bounding_boxes = []
    ar_tags = []

    mtx = np.array([[422.037858, 0.0, 245.895397],
                    [0.0, 435.589734, 163.625535], [0.0, 0.0, 1.0]])
    dist = np.array([-0.289296, 0.061035, 0.001786, 0.015238, 0.0])
    cal_mtx, cal_roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (640, 480), 1,
                                                     (640, 480))

    def __init__(self, name, cal=True):
        rospy.init_node(name)
        self.calibration = cal
        self.motor_pub = rospy.Publisher("xycar_motor",
                                         xycar_motor,
                                         queue_size=1)
        rospy.Subscriber("xycar_ultrasonic", Int32MultiArray,
                         self.ultrasonic_callback)
        rospy.Subscriber("imu", Imu, self.imu_callback)
        rospy.Subscriber("usb_cam/image_raw", Image, self.camera_callback)
        rospy.Subscriber("/darknet_ros/found_object", ObjectCount,
                         self.darknet_ros_detect_object_cnt)
        rospy.Subscriber("/darknet_ros/bounding_boxes", BoundingBoxes,
                         self.darknet_bounding_box_data)
        rospy.Subscriber("/darknet_ros/detection_image", Image,
                         self.darknet_ros_image_callback)
        rospy.Subscriber("ar_pose_marker", AlvarMarkers, self.ar_tags_data)

    def get_ultrasonic_data(self):
        return self.ultrasonic_data

    def get_imu_data(self):
        return self.imu_data

    def get_camera_image_data(self):
        return self.cam_image

    def get_darknet_detect_count_data(self):
        return self.darknet_detect_cnt

    def get_darknet_image_data(self):
        return self.darknet_image

    def get_darknet_bounding_boxes(self):
        return self.bounding_boxes

    def get_ar_tags_datas(self):
        return self.ar_tags

    def set_motor(self, angle, speed):
        self.motor_msg.header.stamp = rospy.Time.now()
        self.motor_msg.angle = angle
        self.motor_msg.speed = speed
        self.motor_pub.publish(self.motor_msg)

    def ultrasonic_callback(self, data):
        self.ultrasonic_data["L"] = data.data[0]
        self.ultrasonic_data["FL"] = data.data[1]
        self.ultrasonic_data["FM"] = data.data[2]
        self.ultrasonic_data["FR"] = data.data[3]
        self.ultrasonic_data["R"] = data.data[4]
        self.ultrasonic_data["BR"] = data.data[5]
        self.ultrasonic_data["BM"] = data.data[6]
        self.ultrasonic_data["BL"] = data.data[7]

    def imu_callback(self, data):
        self.imu_data["x"] = data.orientation.x
        self.imu_data["y"] = data.orientation.y
        self.imu_data["z"] = data.orientation.z
        self.imu_data["w"] = data.orientation.w

    def calibrate_image(self, image):
        tf_image = cv2.undistort(image, self.mtx, self.dist, None,
                                 self.cal_mtx)
        x, y, w, h = self.cal_roi
        tf_image = tf_image[y:y + h, x:x + w]

        return cv2.resize(tf_image, (640, 480))

    def camera_callback(self, data):
        image = self.bridge.imgmsg_to_cv2(data, "bgr8")
        if self.calibration:
            self.cam_image = self.calibrate_image(image)
        else:
            self.cam_image = image

    def darknet_ros_detect_object_cnt(self, data):
        self.darknet_detect_cnt = data.count

    def darknet_ros_image_callback(self, data):
        self.darknet_image = self.bridge.imgmsg_to_cv2(data, "bgr8")

    def darknet_bounding_box_data(self, data):
        # build a fresh dict per bounding box; reusing a single dict would make
        # every entry in the list reference the same (last) detection
        d = []
        for bb in data.bounding_boxes:
            d.append({
                "probability": bb.probability,
                "xmin": bb.xmin,
                "ymin": bb.ymin,
                "xmax": bb.xmax,
                "ymax": bb.ymax,
                "id": bb.id,
                "class": bb.Class
            })
        self.bounding_boxes = d

    def ar_tags_data(self, data):
        # same fix: one dict per marker ("ori_z" key corrected, was "ori_Z")
        d = []
        for bb in data.markers:
            d.append({
                "id": bb.id,
                "pos_x": bb.pose.pose.position.x,
                "pos_y": bb.pose.pose.position.y,
                "pos_z": bb.pose.pose.position.z,
                "ori_x": bb.pose.pose.orientation.x,
                "ori_y": bb.pose.pose.orientation.y,
                "ori_z": bb.pose.pose.orientation.z,
                "ori_w": bb.pose.pose.orientation.w
            })
        self.ar_tags = d

    def get_shutdown(self):
        return not rospy.is_shutdown()
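A short usage sketch for the wrapper class above; the node name, loop rate, distance threshold, and the fixed steering/speed values are illustrative assumptions.

ros = ROS('driver_node')  # registers the node, the motor publisher, and all subscribers
rate = rospy.Rate(10)
while ros.get_shutdown():  # True while rospy has not been shut down
    front = ros.get_ultrasonic_data()["FM"]  # front-middle ultrasonic reading
    speed = 5 if front > 30 else 0  # stop below an (assumed) distance threshold
    ros.set_motor(angle=0, speed=speed)
    rate.sleep()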
Example #16
def main():
    global frame, bridge
    global height, width, mid_x
    global prev_lx, prev_rx

    bridge = CvBridge()
    motor_control = xycar_motor()

    rospy.init_node('lane_detect')
    rospy.Subscriber("/usb_cam/image_raw", Image, img_callback, queue_size=1)
    pub = rospy.Publisher('/xycar_motor', xycar_motor, queue_size=1)

    # out = cv2.VideoWriter(video_name, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 10, (640, 480))
    # f = open(csv_name, 'w')
    # wr = csv.writer(f)
    # wr.writerow(['ts_micro', 'frame_index', 'wheel'])

    while not rospy.is_shutdown():
        # if curr_angle == 99999:
        #     continue
        if frame.size != (640 * 480 * 3):
            continue
        height, width = frame.shape[0:2]
        mid_x = width // 2

        #============== image transform ==============
        canny = canny_edge(frame, 50, 150)
        roi = set_roi(canny)
        warp_img, M, Minv, area = perspective_img(roi)
        #============== Hough Line Transform ==============
        hough = cv2.HoughLinesP(warp_img,
                                1,
                                np.pi / 180,
                                120,
                                np.array([]),
                                minLineLength=10,
                                maxLineGap=20)
        if hough is not None:
            lines, direction = calculate_lines(warp_img, hough)

        warp_img = cv2.cvtColor(warp_img, cv2.COLOR_GRAY2BGR)
        lines_visualize = visualize_lines(warp_img, lines)
        warp_img = cv2.addWeighted(warp_img, 0.9, lines_visualize, 1, 1)
        direction_visualize = visualize_direction(warp_img, direction)
        warp_img = cv2.addWeighted(warp_img, 0.9, direction_visualize, 1, 1)
        warp_img = cv2.circle(warp_img, (int(prev_lx), 240), 5, (255, 0, 0),
                              -1)
        warp_img = cv2.circle(warp_img, (int(prev_rx), 240), 5, (255, 0, 0),
                              -1)
        roi = cv2.addWeighted(roi, 0.9, area, 1, 1)

        cv2.imshow('warp', warp_img)
        cv2.imshow('result', roi)
        Kp = 1.1
        k = 0.005

        delta, v = lane_keeping(lines, direction, Kp, k)

        motor_control.angle = delta
        motor_control.speed = v
        pub.publish(motor_control)

        # wr.writerow([time.time(), cnt, curr_angle])
        # out.write(roi)
        cv2.waitKey(1)