def reverseMoves(moveList):
    """Replay a recorded move list in reverse so the robot retraces its path.

    Each entry in moveList is a move code containing 'L', 'R', or 'F'.  The
    robot first turns 180 degrees, then executes the inverse of each move
    from newest to oldest (a recorded left becomes a right turn and vice
    versa, a forward stays a forward), and finally turns 180 degrees again
    to restore its original heading.

    Fixes over the original: `max` no longer shadows the builtin, and the
    backwards index loop is replaced with `reversed()`.
    """
    if not moveList:
        return
    # 180 degrees to face back along the recorded path
    robotControl.rotateRobot(ser, math.pi)
    robotControl.readResponse(ser)
    time.sleep(1)
    # walk the list from newest to oldest, inverting each turn
    for move in reversed(moveList):
        if 'L' in move:
            # recorded left -> move right
            robotControl.rotateRobot(ser, 25 * (math.pi / 24))
        elif 'R' in move:
            # recorded right -> move left
            robotControl.rotateRobot(ser, (math.pi / 24))
        elif 'F' in move:
            # move forward
            robotControl.moveRobotXY(5, 0, ser)
        robotControl.readResponse(ser)
        time.sleep(1)
    # face forward again
    robotControl.rotateRobot(ser, math.pi)
    robotControl.readResponse(ser)
    time.sleep(1)
def lost_it():
    """Drive one escape leg: two straight runs (55 cm then 20 cm), each
    followed by a clockwise right-angle corner turn."""
    for leg_cm in (55, 20):
        robotControl.moveRobotXY(leg_cm, 0, ser)  # straight movement
        time.sleep(.5)
        robotControl.rotateRobot(ser, math.pi + math.pi / 2)  # corner turn
        time.sleep(.5)
def robot_run():
    """Patrol in a square around the noodle, checking it at every corner.

    ROBOT PLACEMENT: start at the bottom-right corner so the noodle is on
    the robot's left.  At each corner the robot turns 45 degrees toward the
    centre, checks for the noodle with see_obj(), and either resumes the
    patrol or — if the noodle is gone — bails out and then runs the
    lost_it() escape pattern indefinitely.

    Fix over the original: the `elif noodle_here` branches re-tested a
    boolean that was just tested; they are now plain `else`.
    """
    # initial check: turn to look at the centre
    robotControl.rotateRobot(ser, math.pi + math.pi / 4)
    is_taken = False
    time.sleep(1)
    noodle_here = see_obj()
    time.sleep(.5)
    if not noodle_here:
        print("Opsie, they took It T-T")
        is_taken = True
        bail.bail_out()
        time.sleep(.5)
        print("Goalie finished screaming and feels a little better")
        #robotControl.moveRobotXY(55, 0, ser)
        #time.sleep(1)
    else:
        print("Cool beans, It is still here")
        robotControl.rotateRobot(ser, math.pi / 4)  # turn back to normal course
        time.sleep(.5)
    while not is_taken:
        robotControl.moveRobotXY(35, 0, ser)  # straight movement
        time.sleep(1)
        robotControl.rotateRobot(ser, math.pi + math.pi / 2)  # corner turn
        time.sleep(1)
        robotControl.rotateRobot(ser, math.pi + math.pi / 4)  # turn to look at center
        time.sleep(1)
        # At this point, check whether the noodle is still in the middle:
        # if it is gone, bail and switch to escape mode; otherwise continue
        # the patrol.
        noodle_here = see_obj()
        if not noodle_here:
            print("Opsie, they took It T-T")
            is_taken = True
            bail.bail_out()
            print("Goalie finished screaming and feels a little better")
            #robotControl.moveRobotXY(50, 0, ser)
            #time.sleep(1)
        else:
            print("Cool beans, It is still here")
            robotControl.rotateRobot(ser, math.pi / 4)  # turn back to normal course
            time.sleep(1)
    # escape pattern repeats indefinitely once the noodle is taken
    while is_taken:
        lost_it()
def moveFWD():
    """Step the robot 10 cm forward, then steer until the left sensor reads
    inside the wall-following band [-2, 5) cm.

    Returns the result of the last controller exchange.

    Fix over the original: `L1 not in range(-2, 5)` was used as the band
    test, but float membership in a `range` is always False, so any
    non-integer sensor reading entered the correction path regardless of
    its value.  An explicit interval comparison expresses the intent.
    """
    robotControl.moveRobotXY(10, 0, ser)
    results, msgType = robotControl.readResponse(ser)
    L1, M1, R1 = getSensorsInCM()
    if not (-2 <= L1 < 5):
        while L1 > 9:
            # drifting away from the wall: nudge left
            results = robotControl.rotateRobot(ser, pi / 45.7)
            results, msgType = robotControl.readResponse(ser)
            print("Adjusting left!")
            L1, M1, R1 = getSensorsInCM()
        while L1 < 0:
            # too close to the wall: nudge right (pi + angle == clockwise)
            results = robotControl.rotateRobot(ser, pi + pi / 45.7)
            results, msgType = robotControl.readResponse(ser)
            print("Adjusting right!")
            L1, M1, R1 = getSensorsInCM()
    return results
def motors(q):
    """Motor worker: consume (ACT, PARAM) commands from queue q and drive
    the robot until a stop command arrives.

    ACT 0x1 -> move forward PARAM (turns 180 degrees if an obstacle flag
    comes back), ACT 0x2 -> turn left PARAM degrees, ACT 0x3 -> turn right
    PARAM degrees, anything else -> stop the worker.

    Fix over the original: the serial port is now closed in a `finally`
    block, so it is not leaked if the loop raises.
    """
    # Connect to the microcontroller.
    serM = serial.Serial("/dev/serial0", 115200)
    print("Making connection to Microcontroller.")
    try:
        # Set the wheel speed.
        robotControl.changePWMvalues(serM, 150, 125)
        sleep(1)
        while True:
            # Read a command from the communication thread.
            # Packets are [0xDA, SRC, ACT, PARAM, 0xDB], but only ACT and
            # PARAM are passed through the queue.
            command = q.get()
            print("Got a command.")
            ACT = command[0]
            PARAM = command[1]
            if ACT == 0x1:
                robotControl.moveRobotXY(serM, PARAM, 0)
                result, msgType = robotControl.readResponse(serM)
                if result.obsFlag == 1:
                    print("Obstacle detected!")
                    robotControl.rotateRobot(serM, m.pi)
                    result, msgType = robotControl.readResponse(serM)
                print("Moving forward by ", PARAM)
            elif ACT == 0x2:
                # a left turn is expressed as (360 - PARAM) degrees clockwise
                robotControl.rotateRobot(serM, m.pi * 2 - (PARAM * m.pi / 180))
                result, msgType = robotControl.readResponse(serM)
                print("Turning left by ", PARAM)
            elif ACT == 0x3:
                robotControl.rotateRobot(serM, (PARAM * m.pi / 180))
                result, msgType = robotControl.readResponse(serM)
                print("Turning right by ", PARAM)
            else:
                print("Robot does not need to move.")
                break
        # We do not care about data concerning the new Simon,
        # or acknowledges in this process.
    finally:
        serM.close()
def grabNood():
    """Turn around, back onto the noodle to grab it, drive toward home, and
    retarget the tracker at the blue goal.

    Returns the new value of the module-level `search_for` target.

    Fix over the original: it re-tested `search_for == blue_goal`
    immediately after assigning it, an always-true conditional.
    """
    global search_for
    print("Turning and Grabbing the payload")
    robotControl.rotateRobot(ser, math.pi)
    robotControl.readResponse(ser)
    robotControl.moveRobotBackX(ser, 10)
    robotControl.readResponse(ser)
    print("Going home with the object")
    robotControl.moveRobotXY(20, 0, ser)
    robotControl.readResponse(ser)
    search_for = blue_goal
    print("Now looking for goal")
    return search_for
def getNoodle(ser, arduinoSerialData, sense):
    """Back up `sense` cm, spin 180 degrees, reverse onto the noodle, close
    the claw via the Arduino, and drive home.

    Parameters:
        ser               -- serial handle to the motion microcontroller
        arduinoSerialData -- serial handle to the claw Arduino
        sense             -- sensed distance to the noodle, in cm

    Fix over the original: `turnFlag` was a local always equal to 0, so the
    left-/right-variant turn branches were unreachable dead code; only the
    straight 180-degree turn that actually ran is kept.
    """
    print("Need to backup by ", sense, "cm")
    robotControl.moveRobotBackX(ser, int(sense))
    robotControl.readResponse(ser)
    sleep(0.5)
    print("Going to turn 180 deg.")
    robotControl.rotateRobot(ser, m.pi)
    robotControl.readResponse(ser)
    sleep(0.5)
    # Reverse twice the sensed distance so the claw ends up over the noodle.
    print("Grabbing noodle.")
    robotControl.moveRobotBackX(ser, int(2 * sense))
    robotControl.readResponse(ser)
    sleep(0.5)
    # Grab the noodle with Arduino:
    print("Closing the clarm.")
    arduinoSerialData.write(b'3')
    sleep(2)
    print("ET Going Home.")
    robotControl.moveRobotXY(ser, 60, 0)
    robotControl.readResponse(ser)
    sleep(0.5)
# Steering fragment: `center` is the tracked blob's centroid and `radius`
# its enclosing-circle radius.  NOTE(review): the enclosing loop/function is
# not visible in this chunk; X_MIN, X_MAX and RADIUS_MAX come from that
# outer scope — verify against the full file.
if center[0] < X_MIN:
    # Turn robot left
    print("TURN LEFT")
    #time.sleep(.5)
    robotControl.rotateRobot(ser, (math.pi / 24))
    #time.sleep(.5)
elif center[0] > X_MAX:
    # Turn robot right
    print("TURN RIGHT")
    #time.sleep(.5)
    robotControl.rotateRobot(ser, 25 * (math.pi / 24))
    #time.sleep(.5)
elif radius < RADIUS_MAX:
    # Move robot forward
    print("MOVE FORWARD")
    robotControl.moveRobotXY(5, 0, ser)
# draw the circle and centroid on the frame,
# then update the list of tracked points
cv2.circle(image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
cv2.circle(image, center, 5, (0, 0, 255), -1)
# update the points queue
##    pts.appendleft(center)
##    # loop over the set of tracked points
##    for i in range(1, len(pts)):
##        # if either of the tracked points are None, ignore
##        # them
##        if pts[i - 1] is None or pts[i] is None:
##            continue
##
def driveRobotForward():
    """Advance the robot 10 cm straight ahead, consume the controller's
    reply, and pause briefly."""
    step_cm = 10
    robotControl.moveRobotXY(step_cm, 0, ser)
    reply = robotControl.readResponse(ser)
    sleep(0.5)
def moveBWD():
    """Reverse the robot 10 cm and consume the controller's response."""
    robotControl.moveRobotXY(-10, 0, ser)
    response, response_type = robotControl.readResponse(ser)
def trackingAlg():
    """Track the largest blue blob seen by the camera and steer toward it.

    Appends 'L'/'R'/'F' to moveList for every steering action so the path
    can later be replayed (see reverseMoves); returns moveList once the
    target is centred and its radius reaches RADIUS_MAX.  Pressing 'q'
    aborts the capture loop without returning the list.
    """
    moveList = []
    # Initialize frame count
##    frame_count = 0
##    FRAME_MAX = 6
    RADIUS_MAX = 120
##    X_MIN = 163
##    X_MAX = 476
##    Y_MIN = 150
##    Y_MAX = 450
    # pixel bounds of the "centred" window in the frame
    X_MIN = 256
    X_MAX = 384
    Y_MIN = 240
    Y_MAX = 360
    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
##        frame_count = frame_count + 1
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        # resize the frame, blur it, and convert it to the HSV color space
        image = frame.array
        frame_im = imutils.resize(image, width=600)
        blurred = cv2.GaussianBlur(frame_im, (11, 11), 0)
        hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
        threshLower = (80, 90, 80)
        threshUpper = (250, 255, 255)
        # construct a mask for the color "blue", then perform
        # a series of dilations and erosions to remove any small
        # blobs left in the mask
        mask = cv2.inRange(hsv, threshLower, threshUpper)
        mask = cv2.erode(mask, None, iterations=2)
        mask = cv2.dilate(mask, None, iterations=2)
        cv2.imshow("mask", mask)
        # find contours in the mask and initialize the current
        # (x, y) center of the ball
        cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        cnts = cnts[0] if imutils.is_cv2() else cnts[1]
        center = None
        # only proceed if at least one contour was found
        if len(cnts) > 0:
            # find the largest contour in the mask, then use
            # it to compute the minimum enclosing circle and
            # centroid
            c = max(cnts, key=cv2.contourArea)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            M = cv2.moments(c)
            center = (int(M["m10"] / M["m00"]), int(M["m01"] / M["m00"]))
            #print("Center: " +str(center) )
            # only proceed if the radius meets a minimum size
            if radius > 30:
                # TODO: Add code for rotating robot in if statements
                if center[0] < X_MIN:
                    # Turn robot left
                    print("TURN LEFT")
                    moveList.append('L')
                    #time.sleep(.5)
                    robotControl.rotateRobot(ser, (math.pi / 24))
                    #time.sleep(.5)
                elif center[0] > X_MAX:
                    # Turn robot right
                    print("TURN RIGHT")
                    moveList.append('R')
                    #time.sleep(.5)
                    robotControl.rotateRobot(ser, 25 * (math.pi / 24))
                    #time.sleep(.5)
                elif radius < RADIUS_MAX:
                    # Move robot forward
                    print("MOVE FORWARD")
                    moveList.append('F')
                    robotControl.moveRobotXY(5, 0, ser)
                    temp_result = robotControl.readResponse(ser)
                    time.sleep(1)
                    # draw the circle and centroid on the frame,
                    # then update the list of tracked points
                    cv2.circle(image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
                    cv2.circle(image, center, 5, (0, 0, 255), -1)
                else:
                    # centred and close enough: hand the recorded path back
##                    ser.close()
##                    camera.close()
                    rawCapture.truncate(0)
                    return moveList
        # Reset frame count
##        if frame_count == FRAME_MAX:
##            frame_count = 0
        # show the frame
        cv2.rectangle(image, (X_MIN, Y_MIN), (X_MAX, Y_MAX), (0, 255, 0), 2)
        cv2.imshow("Frame", image)
        key = cv2.waitKey(1) & 0xFF
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)
        if key == ord("q"):
            break
def tracking(found):
    """Visually track the current colour target (`search_for`) and drive
    toward it.

    Each pass captures a frame, masks it for the target colour, finds the
    first lit pixel in a coarse scan, estimates a bounding box from edge
    scans, and steers left/right/forward until the target spans the centre
    columns of the image.  Returns when the noodle is grabbed, home is
    reached, or the target disappears (hand control back to the seeker).
    NOTE(review): the `found` parameter is overwritten before use each pass.
    """
    global search_for
    while True:
##        camera.start_preview()
##        time.sleep(0.2)
        with picamera.array.PiRGBArray(camera) as stream:
            camera.capture(stream, format='bgr')
            image = stream.array
##        camera.stop_preview()
        image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
        # mask the frame against each (lower, upper) HSV pair of the target
        for (lower, upper) in search_for:
            lower = np.array(lower, dtype="uint8")
            upper = np.array(upper, dtype="uint8")
            mask = cv2.inRange(image, lower, upper)
            output = cv2.bitwise_and(image, image, mask=mask)
        rawCapture.truncate()
        #Scan across middle stripe
        nest_break = False
        found = False
        for i in range(0, width, 10):
            for j in range(int(height * 0.25), int(height), 10):
                if output[j, i, 2] > 0:
                    print("found while lost")
                    print("i = " + str(i))
                    print("j = " + str(j))
                    fails = 0
                    found = True
                    nest_break = True
                    break
            if nest_break == True:
                break
        if found == True:
            #First pixel is top-left corner
            left_edge = i
            top_edge = j
            #Scan right for right edge of cup
            for rightscanner in range(left_edge, width, 10):
                if output[top_edge, rightscanner, 2] == 0:
                    right_edge = rightscanner
                    break
            midpoint = int(math.floor((right_edge - left_edge) / 2))
            #Scan down from mid-point for bottom edge of cup
            for botscan in range(top_edge, height, 10):
                if output[botscan, midpoint, 2] == 0:
                    bot_edge = botscan
                    break
            p0 = left_edge
            p1 = top_edge
            p2 = bot_edge - top_edge
            p3 = right_edge - left_edge
            print("Left: " + str(left_edge))
            print("Right: " + str(right_edge))
            print("Top: " + str(top_edge))
            print("Bottom: " + str(bot_edge))
            # bbox is (x, y, width, height)
            bbox = (p0, p1, p3, p2)
            print("bbox = " + str(bbox))
            p1 = (int(bbox[0]), int(bbox[1]))
            p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
            print("Ready to start tracking")
            # Initialize tracker with first frame and bounding box
            cv2.putText(image, "*", im_cen, cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)
            p1 = (int(bbox[0]), int(bbox[1]))
            p2 = (int(bbox[0] + bbox[2]), int(bbox[1] + bbox[3]))
            centroid = (int(bbox[0] + bbox[2] / 2), int(bbox[1] + bbox[3] / 2))
            #If object on right side of image, rotate right
            if centroid[0] > (im_cen_x * 1.25):
                print("I'm turning right!")
                print(im_cen_x * 1.25)
                robotControl.rotateRobot(ser, math.pi + math.pi / 15)
                result = robotControl.readResponse(ser)
            elif centroid[0] < (im_cen_x * 0.75):
                print("I'm turning left!")
                print(im_cen_x * .75)
                robotControl.rotateRobot(ser, math.pi / 15)
                result = robotControl.readResponse(ser)
            else:
                # roughly centred: drive forward and test for arrival
                robotControl.moveRobotXY(10, 0, ser)
                result = robotControl.readResponse(ser)
                if search_for == red_nood:
                    # noodle spans both inner check columns -> close enough to grab
                    if output[int(im_cen_y), int(im_cen_x * 0.75), 2] > 0 and output[int(im_cen_y), int(im_cen_x * 1.25), 2] > 0:
                        grabNood()
                        return
                else:
                    # goal spans both outer check columns -> home reached
                    if output[int(im_cen_y), int(im_cen_x * 0.35), 2] > 0 and output[int(im_cen_y), int(im_cen_x * 1.65), 2] > 0:
                        print("I'm home")
                        return
        else:
            # target not visible this frame: hand control back to the seeker
            print("back to seeObj")
            return
        # Exit if ESC pressed
        k = cv2.waitKey(1) & 0xff
        if k == 27:
            break
        rawCapture.truncate(0)
def seek_Object():
    """Coverage/seek behaviour: wall-follow, avoid obstacles, and spiral
    outward until the camera spots and confirms a candidate object.

    Uses the three range sensors (averaged by sensor_Average) to choose a
    wall-following or avoidance action, and camera_capture()/Bound() to
    centre on and confirm an object found during the expanding-square sweep.

    Fixes over the original:
      * the "middle sensor only" branch tested the same condition as the
        right-wall branch and was therefore unreachable;
      * the obstacle case set robOrWall = 2 (wall), contradicting its own
        comment and making the obstacle-avoidance block unreachable;
      * moveRobotbackX is spelled moveRobotBackX as in the rest of the file.
    """
    # Coverage
    comm.readPacket(radio, 3)
    print("No Object Found - begin coverage")
    # Flags: robOrWall -- 0 = nothing to do, 1 = robot/obstacle, 2 = wall.
    robOrWall = 0
    wallCheck = 0
    # Get sensor data
    L_avg, M_avg, R_avg = sensor_Average()
    print("Left Sensor:", L_avg, "Middle Sensor:", M_avg, "Right Sensor:", R_avg)
    if L_avg > L_min and R_avg < R_min and M_avg < M_min:
        # Only the left sensor is near a wall: follow it forward.
        robotControl.moveRobotXY(ser, Dist, 0)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if obs >= 1:
            robotControl.rotateRobot(ser, ((math.pi / 16) + math.pi))
            result, msgType = robotControl.readResponse(ser)
            print("Object Flagged, Turning right away from wall")
        print("Moving Forward, Left Wall follow")
    elif L_avg < L_min and R_avg > R_min and M_avg < M_min:
        # Only the right sensor is near a wall: follow it forward.
        robotControl.moveRobotXY(ser, Dist, 0)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if obs >= 1:
            robotControl.rotateRobot(ser, (math.pi / 16))
            robotControl.readResponse(ser)
            print("Object Flagged, Turning left away from wall")
        print("Moving Forward, right wall follow")
    elif L_avg > L_min and R_avg < R_min and M_avg > M_min:
        # Left and front blocked: defer to obstacle avoidance below.
        wallCheck = 1
        print("Going into object Avoidance - Left/Front Blocked")
    elif L_avg < L_min and R_avg > R_min and M_avg > M_min:
        # Right and front blocked: defer to obstacle avoidance below.
        wallCheck = 1
        print("Going into Object Avoidance - Right/Front Blocked")
    elif L_avg < L_min and R_avg < R_min and M_avg > M_min:
        # Only the middle sensor is near a wall: back off.
        # FIX: the original condition duplicated the right-wall branch,
        # so this code was unreachable.
        robotControl.moveRobotBackX(ser, 5)  # FIX: was moveRobotbackX (typo)
        result, obs = robotControl.readResponse(ser)
        print(obs)
        if obs >= 1:
            robotControl.rotateRobot(ser, (math.pi / 16))
            result, msgType = robotControl.readResponse(ser)
            print("Object Flagged, Turning away from object")
        print("Moving backward, dont hit anything")
    elif L_avg > L_min and R_avg > R_min and M_avg > M_min:
        # Blocked on all 3 sides: turn 180 degrees.
        robotControl.rotateRobot(ser, math.pi)
        result, msgType = robotControl.readResponse(ser)
        print("Turning Around")
    elif L_avg < L_min and R_avg < R_min and M_avg < M_min:
        # Nothing in range: drive an expanding square until a wall, object,
        # or camera candidate is found.
        print("No sensors triggered")
        uflag = 0
        driveDist = 15
        while uflag == 0:
            robotControl.rotateRobot(ser, math.pi / 2)
            result, msgType = robotControl.readResponse(ser)
            time.sleep(wait)
            robotControl.moveRobotXY(ser, driveDist, 0)
            result, msgType = robotControl.readResponse(ser)
            time.sleep(wait)
            driveDist = driveDist + 5
            print("Drive Distance:", driveDist)
            time.sleep(wait)
            print("checking to see if object in view")
            checkFlag, x_center, object_area = camera_capture()
            if checkFlag == 1:
                # Line up with the object, confirm it, then flag it.
                object_detected = False
                xc_low, xc_high = Bound(object_area)
                while not object_detected:
                    comm.readPacket(radio, 3)
                    if x_center < xc_low:
                        # object left of centre: rotate left a notch
                        print("LEFT!")
                        robotControl.rotateRobot(ser, pi / 25.7)
                        result, msgType = robotControl.readResponse(ser)
                    elif x_center > xc_high:
                        # object right of centre: rotate right a notch
                        print("RIGHT!")
                        robotControl.rotateRobot(ser, (pi + (pi / 25.7)))
                        result, msgType = robotControl.readResponse(ser)
                    else:
                        # NOTE(review): `in range` is False for non-integer
                        # x_center; assumes camera_capture returns int pixels.
                        if x_center in range(xc_low, xc_high):
                            object_detected = True
                    # NOTE(review): `xc` is not defined in this function;
                    # presumably a module-level target column -- verify.
                    print("center of object:", x_center, "needs to be:", xc)
                    print("I am in turning loop here - seek object function ")
                    rawCapture.truncate(0)
                    time.sleep(wait)
                    checkFlag, x_center, object_area = camera_capture()
                    xc_low, xc_high = Bound(object_area)
                time.sleep(wait)
                print("Checking to see if object has moved")
                time.sleep(wait)
                # Re-capture: a stable centre and area confirm the object.
                checkFlag, xc_temp, temp_area = camera_capture()
                if (x_center - 30) <= xc_temp <= (x_center + 30) and temp_area >= (0.97 * object_area):
                    # NOTE(review): objectFlag is local here as in the
                    # original; confirm whether it should be global.
                    objectFlag = 1
                    print("object confirmed - going to object")
                    uflag = 1  # break out of loop
            L_avg, M_avg, R_avg = sensor_Average()
            print("Left Sensor:", L_avg, "Middle Sensor:", M_avg, "Right Sensor:", R_avg)
            if L_avg > L_min or R_avg > R_min:
                uflag = 1
            time.sleep(wait)
    # Change the HSV values to look for a white wall:
    if wallCheck == 1:
        hsv_min, hsv_max = changeHSV(2)
        print("min values are:", hsv_min)
        checkFlag, x_center, object_area = camera_capture()
        if object_area >= 50000:
            # Large white area: there is a wall in front.
            robOrWall = 2
            print("wall in front confirmed")
            print("Object Area is", object_area)
        elif object_area < 50000:
            # Not a wall, an obstacle.  FIX: the original set 2 here,
            # contradicting its comment and making the avoidance block
            # below unreachable.
            robOrWall = 1
            print("Wall not confirmed - obstacle in front")
            print("Object Area is:", object_area)
    # obstacle avoidance program
    if robOrWall == 1:
        L_avg, M_avg, R_avg = sensor_Average()
        print("Left Sensor: ", L_avg, "Middle Sensor: ", M_avg, "Right Sensor: ", R_avg)
        if L_avg > L_min and R_avg < R_min:
            # Blocked on the left: dog-leg around to the right.
            print("Going into left side avoidance")
            time.sleep(3)
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Right.")
            print("Driving forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, (math.pi / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Left")
            print("Drive forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, (math.pi / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Left")
            print("Drive forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Right.")
        elif L_avg < L_min and R_avg > R_min:
            # Blocked on the right: dog-leg around to the left.
            print("Going into right side avoidance")
            time.sleep(3)
            robotControl.rotateRobot(ser, (math.pi / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Left")
            print("Drive forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Right.")
            print("Drive forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Right.")
            print("Drive forward.")
            robotControl.moveRobotXY(ser, int(Dist / 2), 0)
            robotControl.readResponse(ser)
            time.sleep(2)
            robotControl.rotateRobot(ser, (math.pi / 2))
            result, msgType = robotControl.readResponse(ser)
            print("Turning Left")
        else:
            print("Why are you here?")
    # Program if wall in front
    if robOrWall == 2:
        L_avg, M_avg, R_avg = sensor_Average()
        print("Left Sensor: ", L_avg, "Middle Sensor: ", M_avg, "Right Sensor: ", R_avg)
        if L_avg > L_min and R_avg < R_min:
            print("Making Right Turn")
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            robotControl.readResponse(ser)
        elif L_avg < L_min and R_avg > R_min:
            print("Making left Turn")
            robotControl.rotateRobot(ser, ((3 * math.pi) / 2))
            robotControl.readResponse(ser)
    # Reset flags and restore the default HSV thresholds for the next pass.
    wallCheck = 0  # reset Flag
    robOrWall = 0  # reset flag
    hsv_min, hsv_max = changeHSV(0)  # reset hsv