Exemplo n.º 1
0
def readIRSensors():
    """Sample the IR sensors several times, average the readings, and map
    them through the per-channel calibration curves.

    Returns a list [left, middle, right] of calibrated sensor estimates.
    """
    # Dummy request/read pair: the very first IR reading is discarded.
    robotControl.getSensors(ser)
    result, msgType = robotControl.readResponse(ser)
    time.sleep(0.5)

    totals = [0, 0, 0]
    for _ in range(6):
        robotControl.getSensors(ser)
        result, msgType = robotControl.readResponse(ser)
        time.sleep(0.5)
        for channel in range(3):
            totals[channel] += result[channel]
    # NOTE(review): six samples are summed but divided by 5 — confirm this
    # scaling is intentional (preserved as-is).
    left, middle, right = (total / 5 for total in totals)

    # Apply the hyperbolic calibration fit for each channel.
    return [
        leftM_prime / (left + leftQ_prime) - 0.42,
        middleM_prime / (middle + middleQ_prime) - 0.42,
        rightM_prime / (right + rightQ_prime) - 0.42,
    ]
Exemplo n.º 2
0
def reverseMoves(moveList):
    """Retrace the robot's path by replaying the recorded moves in reverse.

    The robot first turns 180 degrees, then undoes each move from last to
    first ('L' and 'R' are swapped, 'F' is repeated since we now face
    backwards), and finally turns 180 degrees back. Returns None.
    """
    if not moveList:
        return

    # About-face before retracing.
    robotControl.rotateRobot(ser, math.pi)
    temp_result = robotControl.readResponse(ser)
    time.sleep(1)

    for move in reversed(moveList):
        if 'L' in move:
            # A recorded left turn is undone with a right turn.
            robotControl.rotateRobot(ser, 25 * (math.pi / 24))
        elif 'R' in move:
            # A recorded right turn is undone with a left turn.
            robotControl.rotateRobot(ser, math.pi / 24)
        elif 'F' in move:
            # Forward moves are repeated (we are already facing backwards).
            robotControl.moveRobotXY(5, 0, ser)
        temp_result = robotControl.readResponse(ser)
        time.sleep(1)

    # Turn back to the original heading.
    robotControl.rotateRobot(ser, math.pi)
    temp_result = robotControl.readResponse(ser)
    time.sleep(1)
Exemplo n.º 3
0
def check_end():
    """Sweep the camera in two rotation steps looking for our end flag.

    Returns (grabGoal, tracked) as soon as either detection fires;
    otherwise re-centres with a quarter turn and returns (0, 0).
    """
    for _ in range(2):  # two rotation checks covering ~180 degrees
        robotControl.rotateRobot(ser, pi + pi / 8)
        results, msgType = robotControl.readResponse(ser)
        tracked = cap.see_obj()
        grabGoal = cap.areWeHome()
        if tracked or grabGoal:
            # If we see our end flag we chase it.
            return (grabGoal, tracked)
    time.sleep(.2)
    robotControl.rotateRobot(ser, pi / 4)
    results, msgType = robotControl.readResponse(ser)
    return (0, 0)
Exemplo n.º 4
0
def getSensors():
    """Read the IR sensors once and convert them to clear/blocked flags.

    Returns a (left, middle, right) tuple where 1 means the path is clear
    and 0 means an obstacle is within the distance threshold.
    """
    time.sleep(0.5)
    robotControl.getSensors(ser)
    sensors, msgType = robotControl.readResponse(ser)
    #print (sensors)
    #print("We here we here: ")

    if msgType == 3:
        # 5th-order polynomial calibration: raw sensor reading -> distance.
        #print(sensors)
        DLS = int(LXD[0]*sensors[0]**5 + LXD[1]*sensors[0]**4 + LXD[2]*sensors[0]**3 + LXD[3]*sensors[0]**2 + LXD[4]*sensors[0] + LXD[5])
        DMS = int(MXD[0]*sensors[1]**5 + MXD[1]*sensors[1]**4 + MXD[2]*sensors[1]**3 + MXD[3]*sensors[1]**2 + MXD[4]*sensors[1] + MXD[5])
        DRS = int(RXD[0]*sensors[2]**5 + RXD[1]*sensors[2]**4 + RXD[2]*sensors[2]**3 + RXD[3]*sensors[2]**2 + RXD[4]*sensors[2] + RXD[5])
        print(DLS,DMS,DRS)
        # Convert distances to booleans: 1 = clear, 0 = blocked.
        DLS = 0 if DLS > 15 else 1
        DMS = 0 if DMS > 12 else 1
        DRS = 0 if DRS > 12 else 1
        return (DLS, DMS, DRS)
    else:
        # Bug fix: the fallback branch previously unpacked
        # range_sensor_values() into L, M, R but never returned them, so
        # callers received None. Return the fallback tuple instead.
        L, M, R = range_sensor_values()
        return (L, M, R)
Exemplo n.º 5
0
def moveFWD():
    """Drive forward 10 cm, then trim the heading until the left sensor
    distance falls inside the acceptable band. Returns the last
    controller response."""
    robotControl.moveRobotXY(10, 0, ser)
    results, msgType = robotControl.readResponse(ser)
    left, mid, right = getSensorsInCM()
    if left not in range(-2, 5):
        # Reading too large: rotate left in small steps until under 10.
        while left > 9:
            results = robotControl.rotateRobot(ser, pi / 45.7)
            results, msgType = robotControl.readResponse(ser)
            print("Adjusting left!")
            left, mid, right = getSensorsInCM()
        # Negative reading: rotate right in small steps until non-negative.
        while left < 0:
            results = robotControl.rotateRobot(ser, pi + pi / 45.7)
            results, msgType = robotControl.readResponse(ser)
            print("Adjusting right!")
            left, mid, right = getSensorsInCM()

    return results
Exemplo n.º 6
0
def sensor_Average():
    """Average ten valid sensor packets per channel.

    Keeps polling until each response is a type-3 (sensor) packet.
    Returns (L_avg, M_avg, R_avg).
    """
    totals = [0, 0, 0]
    for _ in range(10):
        robotControl.getSensors(ser)
        result, msgType = robotControl.readResponse(ser)
        # Retry until a type-3 sensor packet arrives.
        while msgType != 3:
            robotControl.getSensors(ser)
            result, msgType = robotControl.readResponse(ser)
        totals[0] += result[0]
        totals[1] += result[1]
        totals[2] += result[2]
    L_avg, M_avg, R_avg = (total / 10 for total in totals)

    return L_avg, M_avg, R_avg
Exemplo n.º 7
0
def motors(q):
    """Motor worker: executes movement commands pulled from queue *q*.

    Each queue item carries (ACT, PARAM) extracted from packets of the
    form [0xDA, SRC, ACT, PARAM, 0xDB]. Loops until an unrecognised ACT
    arrives, then closes the serial port.
    """
    # Connect to the microcontroller.
    serM = serial.Serial("/dev/serial0", 115200)
    print("Making connection to Microcontroller.")

    # Set the wheel speed.
    robotControl.changePWMvalues(serM, 150, 125)
    sleep(1)

    while True:
        # Block until the communication thread hands over a command.
        command = q.get()
        print("Got a command.")

        act, param = command[0], command[1]

        if act == 0x1:
            # Drive forward; spin 180 degrees if an obstacle is flagged.
            robotControl.moveRobotXY(serM, param, 0)
            result, msgType = robotControl.readResponse(serM)
            if result.obsFlag == 1:
                print("Obstacle detected!")
                robotControl.rotateRobot(serM, m.pi)
                result, msgType = robotControl.readResponse(serM)
            print("Moving forward by ", param)
        elif act == 0x2:
            # Left turn expressed as a complementary clockwise rotation.
            robotControl.rotateRobot(serM, m.pi * 2 - (param * m.pi / 180))
            result, msgType = robotControl.readResponse(serM)
            print("Turning left by ", param)
        elif act == 0x3:
            robotControl.rotateRobot(serM, (param * m.pi / 180))
            result, msgType = robotControl.readResponse(serM)
            print("Turning right by ", param)
        else:
            print("Robot does not need to move.")
            break
        # Data concerning the new Simon and acknowledges are ignored here.

    serM.close()
Exemplo n.º 8
0
def grabNood():
    """Turn around, back onto the payload to grab it, then head home.

    Sets the module-level ``search_for`` target to ``blue_goal`` and
    returns it so the caller knows what to look for next.
    """
    global search_for
    print("Turning and Grabbing the payload")
    robotControl.rotateRobot(ser, math.pi)
    result = robotControl.readResponse(ser)

    robotControl.moveRobotBackX(ser, 10)
    result = robotControl.readResponse(ser)

    print("Going home with the object")

    robotControl.moveRobotXY(20, 0, ser)
    result = robotControl.readResponse(ser)

    search_for = blue_goal
    # Bug fix: the original guarded this print with
    # "if search_for == blue_goal", which was always true because
    # search_for had just been assigned blue_goal. Print unconditionally.
    print("Now looking for goal")

    return search_for
Exemplo n.º 9
0
def check_goal_180():
    """Rotate through eight steps scanning for the goal.

    Returns (0, 0, []) after re-orienting with two left turns.
    """
    for x in range(0, 8):  # performing 180 checks
        robotControl.rotateRobot(ser, pi + pi / 8)
        results, msgType = robotControl.readResponse(ser)
        # Bug fix: the three lines below were tab-indented inside a
        # space-indented body, which is a TabError under Python 3. Under
        # Python 2 tab rules they sat inside the loop, preserved here.
        time.sleep(.2)
        # checking goes here
        # add break for when we chase an object
    turnL()
    turnL()
    return (0, 0, [])
Exemplo n.º 10
0
def rotate_Sequence():
    """Sweep up to 16 rotation steps hunting for the goal with the camera.

    The step direction is chosen from whichever side sensor reads larger.
    Once the camera flags an object, the robot nudges left/right until the
    object's x-centre falls inside the acceptance band, then breaks out
    lined up with the goal.
    """
    L_avg, M_avg, R_avg = sensor_Average()
    # Bug fix: `angle` was unbound when L_avg == R_avg, raising
    # UnboundLocalError at rotateRobot below. Default to the small left step.
    angle = (math.pi / 16)
    for j in range(0, 16):
        comm.readPacket(radio, 3)
        if L_avg > R_avg:
            angle = (math.pi / 16)
        elif L_avg < R_avg:
            angle = (math.pi / 8) + math.pi
        robotControl.rotateRobot(ser, angle)
        result, msgType = robotControl.readResponse(ser)
        checkFlag, x_center, object_area = camera_capture()
        if checkFlag == 1:
            # Line up robot with goal
            print("Checking for difference in x - rotate sequence block")
            value = x_center - xc
            print("Difference from center:", value)
            object_detected = False
            while not object_detected:
                comm.readPacket(radio, 3)
                xc_low, xc_high = Bound(object_area)
                if x_center < (xc_low):
                    print("LEFT!")
                    robotControl.rotateRobot(ser, pi / 30)
                    result, msgType = robotControl.readResponse(ser)
                # Object to Left of center, rotate right
                elif x_center > (xc_high):
                    print("RIGHT!")
                    robotControl.rotateRobot(ser, (pi + (pi / 30)))
                    result, msgType = robotControl.readResponse(ser)
                else:
                    if x_center in range((xc_low), (xc_high)):
                        object_detected = True
                print("center of object:", x_center, "needs to be:", xc)
                print("I am in turning loop here - rotate sequence block")
                rawCapture.truncate(0)
                time.sleep(wait)
                checkFlag, x_center, object_area = camera_capture()
                xc_low, xc_high = Bound(object_area)
            print("Lined up with goal")
            break
Exemplo n.º 11
0
def getSensorsInCM():
    """Read the IR sensors and return calibrated distances in centimetres.

    Retries (recursively) until a type-3 sensor packet is received.
    Returns an (left, middle, right) tuple of integer distances.
    """
    time.sleep(0.5)
    robotControl.getSensors(ser)
    sensors, msgType = robotControl.readResponse(ser)

    if msgType == 3:
        # 5th-order polynomial calibration per channel: raw reading -> cm.
        #print(sensors)
        DLS = int(LXD[0]*sensors[0]**5 + LXD[1]*sensors[0]**4 + LXD[2]*sensors[0]**3 + LXD[3]*sensors[0]**2 + LXD[4]*sensors[0] + LXD[5])
        DMS = int(MXD[0]*sensors[1]**5 + MXD[1]*sensors[1]**4 + MXD[2]*sensors[1]**3 + MXD[3]*sensors[1]**2 + MXD[4]*sensors[1] + MXD[5])
        DRS = int(RXD[0]*sensors[2]**5 + RXD[1]*sensors[2]**4 + RXD[2]*sensors[2]**3 + RXD[3]*sensors[2]**2 + RXD[4]*sensors[2] + RXD[5])
        print(DLS,DMS,DRS)
        return (DLS, DMS, DRS)
    else:
        # Bug fix: this was `L.M,R = getSensorsInCM()` — a typo (attribute
        # assignment on an undefined name) and the retry result was never
        # returned, so callers received None. Retry and propagate instead.
        return getSensorsInCM()
Exemplo n.º 12
0
def check_goal_180():
    """Scan ~180 degrees in eight rotation steps for the goal or payload.

    Returns (grabGoal, tracked) as soon as either detection fires,
    otherwise (0, 0) after two left turns to re-orient.
    """
    for _ in range(8):  # performing 180 checks
        robotControl.rotateRobot(ser, pi + pi / 8)
        results, msgType = robotControl.readResponse(ser)
        tracked = cap.see_obj()
        grabGoal = cap.haveNood()

        if tracked or grabGoal:
            return (grabGoal, tracked)
        time.sleep(0.25)
    # checking goes here
    # add break for when we chase an object
    turnL()
    turnL()
    return (0, 0)
Exemplo n.º 13
0
def getNoodle(ser, arduinoSerialData, sense):
    """Back up onto the noodle, grab it with the Arduino claw, and head home.

    ser               -- serial handle to the drive microcontroller
    arduinoSerialData -- serial handle to the claw Arduino
    sense             -- backup distance in cm reported by the sensor
    """
    # NOTE(review): turnFlag is always 0 here, so only the first turn
    # branch ever executes; the others look like calibration leftovers.
    turnFlag = 0

    print("Need to backup by ", sense, "cm")
    robotControl.moveRobotBackX(ser, int(sense))
    result = robotControl.readResponse(ser)
    sleep(0.5)

    # Each branch issues a different 180-degree turn command, then all
    # share the same read + settle (hoisted from the duplicated tails).
    if turnFlag == 0:
        print("Going to turn 180 deg.")
        robotControl.rotateRobot(ser, m.pi)
    elif turnFlag == 1:
        print("Going to turn left 180 degrees.")
        robotControl.rotateRobot(ser, 7 * m.pi / 4)
    else:
        print("Going to turn right 180 degrees.")
        robotControl.rotateRobot(ser, 160 * m.pi / 180)
    result = robotControl.readResponse(ser)
    sleep(0.5)

    # travel = 12 # cm

    print("Grabbing noodle.")
    robotControl.moveRobotBackX(ser, int(2 * sense))
    result = robotControl.readResponse(ser)
    sleep(0.5)

    # Grab the noodle with Arduino:
    print("Closing the clarm.")
    arduinoSerialData.write(b'3')
    sleep(2)

    print("ET Going Home.")
    robotControl.moveRobotXY(ser, 60, 0)
    result = robotControl.readResponse(ser)
    sleep(0.5)
Exemplo n.º 14
0
def return_Object():
    """Drive toward the (orange) goal until the obstacle sensor trips and
    the camera confirms a large-enough contour.

    Re-acquires the target via rotate_Sequence() whenever the object is
    lost or its contour area is too small. Returns None; exits once
    goalFlag is set (goal reached).
    """
    print("Going into return object function")
    hsv_min, hsv_max = changeHSV(3)  # set hsv values to orange goal color
    goalFlag = 0
    while (goalFlag == 0):
        # driving to object until sensor value is  max
        print("Beginning to drive to object")
        robotControl.moveRobotObs(ser)
        result, obs = robotControl.readResponse(ser)
        time.sleep(wait)
        #print(result)
        # Once sensor is maxed, check to see if object area is correct
        if (obs >= 1):
            checkFlag, x_center, temp_area = camera_capture()
            if checkFlag == 1:
                if temp_area >= area_min:
                    print("Made it to the Goal! Team Wall-E Rules!!")
                    goalFlag = 1
                # If sensor caused by obstacle
                elif temp_area < area_min:
                    print("Object area is:", temp_area,
                          "Object Area needs to be:", area_min)
                    print("Going into object avoidance")
                    #seek_object()
                    rotate_Sequence()
                    checkFlag, x_center, object_area = camera_capture()
                    # Line up center of contour with center of robot
                    # Object to Right of center, Rotate left
                    if checkFlag == 1:
                        print("Checking for difference in x")
                        value = x_center - xc
                        print("Difference from center:", value)
                        object_detected = False
                        # NOTE(review): xc_low/xc_high are read on the first
                        # pass before being assigned in this scope — they are
                        # presumably module-level globals; confirm.
                        while not object_detected:
                            if x_center < (xc_low):
                                print("LEFT!")
                                robotControl.rotateRobot(ser, pi / 25.7)
                                result, msgType = robotControl.readResponse(
                                    ser)
                            # Object to Left of center, rotate right
                            elif x_center > (xc_high):
                                print("RIGHT!")
                                robotControl.rotateRobot(
                                    ser, (pi + (pi / 25.7)))
                                result, msgType = robotControl.readResponse(
                                    ser)
                            else:
                                if x_center in range((xc_low), (xc_high)):
                                    object_detected = True
                            print("center of object:", x_center,
                                  "needs to be:", xc)
                            print(
                                "I am in turning loop here - return object block"
                            )
                            # Re-capture and recompute the acceptance band
                            # each pass so the loop tracks a moving target.
                            rawCapture.truncate(0)
                            time.sleep(wait)
                            checkFlag, x_center, object_area = camera_capture()
                            xc_low, xc_high = Bound(object_area)
                        print("object lined up")
                    elif checkFlag == 0:
                        rotate_Sequence()
            elif checkFlag == 0:
                rotate_Sequence()
        elif obs == 0:
            rotate_Sequence()
Exemplo n.º 15
0
def moveBWD():
    """Drive the robot 10 cm backwards and consume the response."""
    robotControl.moveRobotXY(-10, 0, ser)
    results, msgType = robotControl.readResponse(ser)
Exemplo n.º 16
0
def turnBack():
    """Rotate 180 degrees and return the controller response."""
    half_turn = pi
    robotControl.rotateRobot(ser, half_turn)
    results, msgType = robotControl.readResponse(ser)
    return results
Exemplo n.º 17
0
def turnR():
    """Rotate by 3*pi/2 (used as a right turn) and return the response."""
    right_turn = pi + pi / 2
    robotControl.rotateRobot(ser, right_turn)
    results, msgType = robotControl.readResponse(ser)
    return results
Exemplo n.º 18
0
def trackingAlg():
    """Track a colour blob with the camera and steer the robot toward it.

    For each frame, threshold in HSV, find the largest contour, and issue
    a turn ('L'/'R') when the blob centre leaves the [X_MIN, X_MAX] band
    or a forward move ('F') while the blob radius is below RADIUS_MAX.
    Every move is appended to moveList; when the blob is lost the
    accumulated moveList is returned (so reverseMoves can retrace it).
    Returns moveList, or None if the user quits with 'q'.
    """
    moveList = []

    # Initialize frame count
    ##    frame_count = 0
    ##    FRAME_MAX = 6
    RADIUS_MAX = 120
    ##X_MIN = 163
    ##X_MAX = 476
    ##Y_MIN = 150
    ##Y_MAX = 450
    # Dead band (pixels) in which the blob counts as centred.
    X_MIN = 256
    X_MAX = 384
    Y_MIN = 240
    Y_MAX = 360

    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        ##        frame_count = frame_count + 1
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        # resize the frame, blur it, and convert it to the HSV
        # color space
        image = frame.array
        frame_im = imutils.resize(image, width=600)
        blurred = cv2.GaussianBlur(frame_im, (11, 11), 0)
        hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
        # HSV threshold bounds for the tracked colour.
        threshLower = (80, 90, 80)
        threshUpper = (250, 255, 255)

        # construct a mask for the color "blue", then perform
        # a series of dilations and erosions to remove any small
        # blobs left in the mask
        mask = cv2.inRange(hsv, threshLower, threshUpper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        cv2.imshow("mask", mask)

        # find contours in the mask and initialize the current
        # (x, y) center of the ball
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL,
                                cv2.CHAIN_APPROX_SIMPLE)
        # NOTE(review): this index choice handles the cv2-version-dependent
        # findContours return tuple (OpenCV 2 vs 3); confirm for OpenCV 4.
        cnts = cnts[0] if imutils.is_cv2() else cnts[1]
        center = None

        # only proceed if at least one contour was found
        if len(cnts) > 0:

            # find the largest contour in the mask, then use
            # it to compute the minimum enclosing circle and
            # centroid
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            M = cv2.moments(c)
            center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
            #print("Center: " +str(center) )

            # only proceed if the radius meets a minimum size
            if radius > 30:
                # TODO: Add code for rotating robot in if statements
                if center[0] < X_MIN:
                    # Turn robot left
                    print("TURN LEFT")
                    moveList.append('L')
                    #time.sleep(.5)
                    robotControl.rotateRobot(ser, (math.pi / 24))
                    #time.sleep(.5)
                elif center[0] > X_MAX:
                    # Turn robot right
                    print("TURN RIGHT")
                    moveList.append('R')
                    #time.sleep(.5)
                    robotControl.rotateRobot(ser, 25 * (math.pi / 24))
                    #time.sleep(.5)
                elif radius < RADIUS_MAX:
                    # Move robot forward
                    print("MOVE FORWARD")
                    moveList.append('F')
                    robotControl.moveRobotXY(5, 0, ser)
                # Shared read/settle after whichever move was issued.
                temp_result = robotControl.readResponse(ser)
                time.sleep(1)
                # draw the circle and centroid on the frame,
                # then update the list of tracked points
                cv2.circle(image, (int(x), int(y)), int(radius), (0, 255, 255),
                           2)
                cv2.circle(image, center, 5, (0, 0, 255), -1)
        else:
            # Blob lost: hand the recorded moves back to the caller.
            ##            ser.close()
            ##            camera.close()
            rawCapture.truncate(0)
            return moveList

            # Reset frame count
##        if frame_count == FRAME_MAX:
##            frame_count = 0
# show the frame
        cv2.rectangle(image, (X_MIN, Y_MIN), (X_MAX, Y_MAX), (0, 255, 0), 2)
        cv2.imshow("Frame", image)

        key = cv2.waitKey(1) & 0xFF
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)
        if key == ord("q"):
            break
Exemplo n.º 19
0
def seek_Object():  # Coverage
    """Coverage/search behaviour: wall-follow, avoid obstacles, and spiral
    outward until the camera or the range sensors find something.

    Reads the averaged IR sensors and dispatches on which sides are
    blocked. When nothing is blocked it drives an expanding square until a
    wall or a camera-confirmed object appears. When something is suspected
    ahead (wallCheck), the camera decides wall vs obstacle and the
    matching avoidance manoeuvre runs. Returns None.
    """
    comm.readPacket(radio, 3)
    print("No Object Found - begin coverage")
    # Define the flags here, object-in-view flag is already defined below: - ZA
    robOrWall = 0  # 0 = nothing to do. 1 = robot, 2 = wall.
    wallCheck = 0
    # Get sensor data
    [L_avg, M_avg, R_avg] = sensor_Average()
    print("Left Sensor:", L_avg, "Middle Sensor:", M_avg, "Right Sensor:",
          R_avg)
    # If only left sensor near wall okay, move forward
    if (L_avg > L_min and R_avg < R_min and M_avg < M_min):
        robotControl.moveRobotXY(ser, Dist, 0)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if (obs >= 1):
            robotControl.rotateRobot(ser, ((math.pi / 16) + math.pi))
            result, msgType = robotControl.readResponse(ser)
            print("Object Flagged, Turning right  away from wall")
        print("Moving Forward, Left Wall follow")
    # if only right sensor near wall, move forward
    elif (L_avg < L_min and R_avg > R_min and M_avg < M_min):
        robotControl.moveRobotXY(ser, Dist, 0)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if (obs >= 1):
            robotControl.rotateRobot(ser, (math.pi / 16))
            robotControl.readResponse(ser)
            print("Object Flagged, Turning left  away from wall")
        print("Moving Forward, right wall follow")
    # if along left wall with obstacle in front, go to obstacle avoidance
    elif (L_avg > L_min and R_avg < R_min and M_avg > M_min):
        wallCheck = 1
        print("Going into object Avoidance - Left/Front Blocked")
    # if along right wall with obstacle in front, go to obstacle avoidance
    elif (L_avg < L_min and R_avg > R_min and M_avg > M_min):
        wallCheck = 1
        print("Going into Object Avoidance - Right/Front Blocked")
    # if only middle sensor near wall, back away.
    # Bug fix: this condition previously duplicated the "right sensor only"
    # branch above (L_avg < L_min and R_avg > R_min and M_avg < M_min),
    # which made this middle-only case unreachable. Corrected to match the
    # comment and the branch body.
    elif (L_avg < L_min and R_avg < R_min and M_avg > M_min):
        # Bug fix: was robotControl.moveRobotbackX (wrong capitalisation —
        # every other call site in this file uses moveRobotBackX).
        robotControl.moveRobotBackX(ser, 5)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if (obs >= 1):
            robotControl.rotateRobot(ser, (math.pi / 16))
            result, msgType = robotControl.readResponse(ser)
            print("Object Flagged, Turning away from  object")
        print("Moving backward, dont hit anything")
    # If blocked on all 3 Sides, turn 180 degrees
    elif (L_avg > L_min and R_avg > R_min and M_avg > M_min):
        robotControl.rotateRobot(ser, math.pi)
        result, msgType = robotControl.readResponse(ser)
        print("Turning Around")
    # if all 3 sides not blocked begin loop until side is found
    elif (L_avg < L_min and R_avg < R_min and M_avg < M_min):
        print("No sensors triggered")
        uflag = 0
        driveDist = 15
        while (uflag == 0):
            # Drive in increasing in size square till a wall or object is found
            robotControl.rotateRobot(ser, math.pi / 2)
            result, msgType = robotControl.readResponse(ser)
            time.sleep(wait)
            robotControl.moveRobotXY(ser, driveDist, 0)
            result, msgType = robotControl.readResponse(ser)
            time.sleep(wait)
            driveDist = driveDist + 5
            print("Drive Distance:", driveDist)
            time.sleep(wait)
            print("checking to see if object in view")
            checkFlag, x_center, object_area = camera_capture()

            if checkFlag == 1:
                # line up with object, confirm it is object, then set objectFlag to 1
                object_detected = False
                xc_low, xc_high = Bound(object_area)
                while not object_detected:
                    comm.readPacket(radio, 3)
                    if x_center < (xc_low):
                        print("LEFT!")
                        robotControl.rotateRobot(ser, pi / 25.7)
                        result, msgType = robotControl.readResponse(ser)
                    # Object to Left of center, rotate right
                    elif x_center > (xc_high):
                        print("RIGHT!")
                        robotControl.rotateRobot(ser, (pi + (pi / 25.7)))
                        result, msgType = robotControl.readResponse(ser)
                    else:
                        if x_center in range((xc_low), (xc_high)):
                            object_detected = True
                    print("center of object:", x_center, "needs to be:", xc)
                    print("I am in turning loop here - seek object function ")
                    rawCapture.truncate(0)
                    time.sleep(wait)
                    checkFlag, x_center, object_area = camera_capture()
                    xc_low, xc_high = Bound(object_area)
                time.sleep(wait)
                print("Checking to see if object has moved")
                time.sleep(wait)
                checkFlag, xc_temp, temp_area = camera_capture()
                # Confirm the object is still (roughly) where we lined up:
                # centre within +/-30 px and area not shrunk by more than 3%.
                if ((x_center - 30) <= xc_temp <= (x_center + 30)
                        and temp_area >= (0.97 * object_area)):
                    objectFlag = 1
                    print("object confirmed - going to object")
                    uflag = 1  # break out of loop

            L_avg, M_avg, R_avg = sensor_Average()
            print("Left Sensor:", L_avg, "Middle Sensor:", M_avg,
                  "Right Sensor:", R_avg)
            if (L_avg > L_min or R_avg > R_min):
                uflag = 1
                time.sleep(wait)

    # Change the HSV values to look for a white wall:
    if (wallCheck == 1):
        hsv_min, hsv_max = changeHSV(2)
        print("min values are:", hsv_min)
        checkFlag, x_center, object_area = camera_capture()
        if object_area >= 50000:
            # There is a wall infront, so set value to 2
            robOrWall = 2
            print("wall in front confirmed")
            print("Object Area is", object_area)
        # Not a wall, is an obstacle so set value to 1.
        elif object_area < 50000:
            # Bug fix: this branch set robOrWall = 2, contradicting its own
            # comment and leaving the obstacle branch (robOrWall == 1)
            # below as dead code.
            robOrWall = 1
            print("Wall not confirmed - obstacle in front")
            print("Object Area is:", object_area)
        # obstacle avoidance program
        if (robOrWall == 1):
            L_avg, M_avg, R_avg = sensor_Average()
            print("Left Sensor: ", L_avg, "Middle Sensor: ", M_avg,
                  "Right Sensor: ", R_avg)
            if (L_avg > L_min and R_avg < R_min):
                print("Going into left side avoidance")
                time.sleep(3)
                # Dog-leg around the obstacle: right, forward, left, forward,
                # left, forward, right — back onto the original heading.
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Right.")
                print("Driving forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, (math.pi / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Left")
                print("Drive forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, (math.pi / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Left")
                print("Drive forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Right.")
            elif (L_avg < L_min and R_avg > R_min):
                print("Going into right side avoidance")
                time.sleep(3)
                # Mirror-image dog-leg: left, forward, right, forward,
                # right, forward, left.
                robotControl.rotateRobot(ser, (math.pi / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Left")
                print("Drive forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Right.")
                print("Drive forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Right.")
                print("Drive forward.")
                robotControl.moveRobotXY(ser, int(Dist / 2), 0)
                robotControl.readResponse(ser)
                time.sleep(2)
                robotControl.rotateRobot(ser, (math.pi / 2))
                result, msgType = robotControl.readResponse(ser)
                print("Turning Left")
            else:
                print("Why are you here?")
        # Program if wall in front
        if (robOrWall == 2):
            L_avg, M_avg, R_avg = sensor_Average()
            print("Left Sensor: ", L_avg, "Middle Sensor: ", M_avg,
                  "Right Sensor: ", R_avg)
            if (L_avg > L_min and R_avg < R_min):
                print("Making Right Turn")
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                robotControl.readResponse(ser)
            elif (L_avg < L_min and R_avg > R_min):
                print("Making left  Turn")
                robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
                robotControl.readResponse(ser)
        wallCheck = 0  # reset Flag
        robOrWall = 0  # reset flag
        hsv_min, hsv_max = changeHSV(0)  # reset hsv
Exemplo n.º 20
0
def main():
    """Detect a face with a Haar cascade and steer the robot toward it.

    Rotates left/right in 15-degree steps until the detection is roughly
    centred, then spawns a child process to drive forward; stops (joins
    and terminates) that process when the detection is lost. Closes the
    camera and serial port when the capture loop ends.
    """

    # get the haar xml files for anime
    faceCascade = cv2.CascadeClassifier('cascade.xml')

    # Initialize flag to switch between started and stopped processes.
    startStop = 0

    # turn camera on to capture video
    camera = PiCamera()
    camera.framerate = 60
    camera.rotation = 180
    camera.resolution = (1920, 1080)  # Force resolution.
    rawCapture = PiRGBArray(camera)
    sleep(2)  # Let camera warm-up.

    counter = 0

    # while camera is on
    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        # read the camera
        img = frame.array

        # covert it to grayscale
        grayscaled = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # get the waifu
        faces = faceCascade.detectMultiScale(grayscaled, 3, 3)

        # NOTE(review): per the trailing comment, is_empty(faces) == 1
        # apparently means "faces HAS content" — confirm the helper's
        # contract, the name suggests the opposite.
        if (is_empty(faces) == 1 and startStop
                == 0):  # If faces has content, then check alignment.
            counter = counter + 1
            print(counter)
            # Create the process to start driving wheels.
            # faces[0][0] + faces[0][2] is the detection's right edge (x + w).
            if (faces[0][0] + faces[0][2] < 700):
                # Turn Left 15 degrees.
                print("Turning Left: ", faces[0][0] + faces[0][2])
                robotControl.rotateRobot(ser, 15 * math.pi / 180)
                result = robotControl.readResponse(ser)
                sleep(0.5)
            elif (faces[0][0] + faces[0][2] > 1300):
                # Turn Right 15 degrees.
                print("Turning Right: ", faces[0][0] + faces[0][2])
                robotControl.rotateRobot(ser, 195 * math.pi / 180)
                result = robotControl.readResponse(ser)
                sleep(0.5)
            else:
                startStop = 1  # Started
                print("Driving a tiny bit: ", faces[0][0] + faces[0][2])
                process = mp.Process(target=driveRobotForward)
                process.start()
        elif (is_empty(faces) == 0 and startStop == 1):
            # NOTE(review): `process` is unbound if this branch could ever
            # run before the start branch above; startStop == 1 guards that,
            # assuming startStop is only set where process is created.
            startStop = 0  # Stop it.
            process.join()  # Might need to terminate instead of join()
            process.terminate()

        # Code below needs to be removed for ssh-only applications.

        # draw rectangle for best girl
        # for (x,y,w,h) in faces:
        #     # Instead of drawing the rectangle, maybe drive forward here???
        #     cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,255),5)

        # show the image with rectangles
        # cv2.imshow('img', cv2.flip(img, 1))
        # wait for user to press the escape key to exit
        # k = cv2.waitKey(10) & 0xff

        # Clear the stream in preparation for the next frame. NEED to keep this for ssh-only applications.
        rawCapture.truncate(0)

        # if k == 27:
        #     break

    # close the camera
    camera.close()
    # Close the serial port.
    ser.close()
Exemplo n.º 21
0
    fails = 0
    turn_angles = [
        math.pi / 2, math.pi / 4, 0, math.pi + math.pi / 4,
        math.pi + math.pi / 2
    ]
    camera.start_preview()
    time.sleep(0.2)
    camera.stop_preview()
    search_for = red_nood
    #Coverage begins
    while True:
        lost = False
        sensor_dists = [0, 0, 0, 0, 0]
        #Start by turning left 90 degrees, then sprinkler effect-ing through a set of angles
        robotControl.rotateRobot(ser, math.pi / 2)
        result = robotControl.readResponse(ser)
        see_obj()
        robotControl.getSensors(ser)
        sens, msgType = robotControl.readResponse(ser)
        time.sleep(1)
        sensor_dists[0] = sens[1]
        ##        print("dists=" + str(sensor_dists))

        #Nick: This is the "exploration" piece
        for i in range(1, 5):
            robotControl.rotateRobot(ser, math.pi + math.pi / 4)
            blah = robotControl.readResponse(ser)
            lost = see_obj()
            if lost == True:
                break
            robotControl.getSensors(ser)
Exemplo n.º 22
0
def tracking(found):
    """Chase the colour blob selected by the module-global `search_for`.

    Repeatedly captures a frame, masks it for the target HSV bands, locates
    the blob's bounding box on a 10-pixel grid, and issues rotate/forward
    commands to centre the robot on it.  Returns when the goal condition is
    met (blob spans the target width at the image centre row) or when the
    blob is lost (caller falls back to its search behaviour).

    The `found` parameter is overwritten every iteration; it is kept only
    for interface compatibility with existing callers.
    """
    global search_for
    while True:
        # Capture a single BGR frame from the Pi camera.
        with picamera.array.PiRGBArray(camera) as stream:
            camera.capture(stream, format='bgr')
            image = stream.array

        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        # Mask for each (lower, upper) HSV band; as in the original code,
        # `output` ends up holding only the *last* band's masked pixels.
        for (lower, upper) in search_for:
            lower = np.array(lower, dtype="uint8")
            upper = np.array(upper, dtype="uint8")
            mask = cv2.inRange(image, lower, upper)
            output = cv2.bitwise_and(image, image, mask=mask)
        # Reset the shared capture buffer (made consistent with the
        # truncate(0) used at the other call sites in this file).
        rawCapture.truncate(0)

        # Scan the lower 75% of the frame on a 10px grid for the first
        # masked pixel (channel 2 > 0).
        nest_break = False
        found = False
        for i in range(0, width, 10):
            for j in range(int(height * 0.25), int(height), 10):
                if output[j, i, 2] > 0:
                    print("found while lost")
                    print("i = " + str(i))
                    print("j = " + str(j))
                    # NOTE(review): this assigns a *local* `fails`; if the
                    # module-level counter was intended, a `global fails`
                    # declaration is missing — confirm before changing.
                    fails = 0
                    found = True
                    nest_break = True
                    break
            if nest_break:
                break

        if found:
            # First hit is treated as the top-left corner of the blob.
            left_edge = i
            top_edge = j
            # BUGFIX: default the far edges to the frame border so they are
            # never unbound — the original raised NameError whenever the
            # blob ran off the image and the scans below never found a zero
            # pixel before exhausting their range.
            right_edge = width
            bot_edge = height
            # Scan right along the top edge for the blob's right edge.
            for rightscanner in range(left_edge, width, 10):
                if output[top_edge, rightscanner, 2] == 0:
                    right_edge = rightscanner
                    break
            # BUGFIX: the column to scan down must be the blob's centre in
            # absolute image coordinates; the original used only half the
            # blob width, i.e. a column near the image's left side.
            midpoint = left_edge + int(math.floor((right_edge - left_edge) / 2))
            # Scan down from the centre column for the blob's bottom edge.
            for botscan in range(top_edge, height, 10):
                if output[botscan, midpoint, 2] == 0:
                    bot_edge = botscan
                    break
            print("Left: " + str(left_edge))
            print("Right: " + str(right_edge))
            print("Top: " + str(top_edge))
            print("Bottom: " + str(bot_edge))
            # bbox = (x, y, width, height); the duplicated dead p1/p2 point
            # computations from the original were removed.
            bbox = (left_edge, top_edge, right_edge - left_edge,
                    bot_edge - top_edge)
            print("bbox = " + str(bbox))
            print("Ready to start tracking")
            # Mark the image centre (only visible when a window is shown).
            cv2.putText(image, "*", im_cen, cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                        (255, 0, 0), 2)
            centroid = (int(bbox[0] + bbox[2] / 2), int(bbox[1] + bbox[3] / 2))
            # Blob right of the centre band -> rotate right (pi + delta).
            if centroid[0] > (im_cen_x * 1.25):
                print("I'm turning right!")
                print(im_cen_x * 1.25)
                robotControl.rotateRobot(ser, math.pi + math.pi / 15)
                result = robotControl.readResponse(ser)
            # Blob left of the centre band -> rotate left.
            elif centroid[0] < (im_cen_x * 0.75):
                print("I'm turning left!")
                print(im_cen_x * .75)
                robotControl.rotateRobot(ser, math.pi / 15)
                result = robotControl.readResponse(ser)
            else:
                # Centred: drive forward, then check whether the blob spans
                # the goal width at the image centre row.
                robotControl.moveRobotXY(10, 0, ser)
                result = robotControl.readResponse(ser)
                if search_for == red_nood:
                    if output[int(im_cen_y),
                              int(im_cen_x * 0.75),
                              2] > 0 and output[int(im_cen_y),
                                                int(im_cen_x * 1.25), 2] > 0:
                        grabNood()
                        return
                else:
                    if output[int(im_cen_y),
                              int(im_cen_x * 0.35),
                              2] > 0 and output[int(im_cen_y),
                                                int(im_cen_x * 1.65), 2] > 0:
                        print("I'm home")
                        return
        else:
            print("back to seeObj")
            return

        # Exit if ESC pressed (only registers when a HighGUI window exists).
        k = cv2.waitKey(1) & 0xff
        if k == 27:
            break

        rawCapture.truncate(0)
Exemplo n.º 23
0
def driveRobotForward():
    """Step the robot forward 10 units along its heading (no lateral move),
    then pause half a second so the motion completes before the next command.
    """
    robotControl.moveRobotXY(10, 0, ser)
    _ack = robotControl.readResponse(ser)  # consume the controller's reply
    sleep(0.5)
Exemplo n.º 24
0
def check_end():
    """Sweep roughly 180 degrees in two right-rotation steps (pi + pi/8
    each), reading the controller response after every step while looking
    for the end flag.
    """
    for _ in range(2):
        robotControl.rotateRobot(ser, pi + pi / 8)
        response, msg_type = robotControl.readResponse(ser)
        time.sleep(.2)
# Project 0 script: characterize wheel variance for straight moves and
# rotations, logging odometry for 10 trials of each to user-named files.
print("Starting")

# Open Serial Port
ser = serial.Serial("/dev/serial0", 115200)  # UART link to the robot controller

# 17 September 2018 - Project 0
# Characterize your wheels.  Measure the variance in
# moving straight and rotation.

filename = input("Enter filename for output for moving straight: ")
f = open(filename, "w+")

# Ten straight-line trials: command a 20-unit forward move, wait for the
# odometry response, then log obstacle flag, wheel distances, and time.
for i in range(10):
    robotControl.moveRobotXY(20, 0, ser)
    result = robotControl.readResponse(ser)
    time.sleep(1)  # let the move finish before issuing the next command
    f.write(
        str(result.obsFlag) + " " + str(result.rightWheelDist) + " " +
        str(result.leftWheelDist) + " " + str(result.time) +
        "\n")  # Write to file

f.close()

filename = input("Enter filename for output for rotating: ")
f = open(filename, "w+")

# Ten rotation trials of pi radians each.  NOTE(review): the write/close
# for this second file is not visible in this chunk — presumably the
# script continues past here; confirm the file is closed downstream.
for i in range(10):
    robotControl.rotateRobot(ser, (math.pi))
    result = robotControl.readResponse(ser)
    time.sleep(1)
Exemplo n.º 26
0
       "object area is:", object_area)
 if checkFlag == 1:
     comm.readPacket(radio, 3)
     # Line up center of contour with center of robot
     print("Checking for difference in x -object Flag block")
     value = x_center - xc
     print("Difference from center:", value)
     # Object to Right of center, Rotate left
     xc_low, xc_high = Bound(object_area)
     object_detected = False
     while not object_detected:
         comm.readPacket(radio, 3)
         if x_center < (xc_low):
             print("LEFT!")
             robotControl.rotateRobot(ser, pi / 30)
             result, msgType = robotControl.readResponse(ser)
         # Object to Left of center, rotate right
         elif x_center > (xc_high):
             print("RIGHT!")
             robotControl.rotateRobot(ser, (pi + (pi / 30)))
             result, msgType = robotControl.readResponse(ser)
         else:
             if x_center in range((xc_low), (xc_high)):
                 object_detected = True
         print("center of object:", x_center, "needs to be:", xc)
         print("I am in turning loop here - object 0 block")
         rawCapture.truncate(0)
         time.sleep(wait)
         checkFlag, x_center, object_area = camera_capture()
         xc_low, xc_high = Bound(object_area)
     rawCapture.truncate(0)