async def handle_command(websocket, path):
    """Receive robot commands over a websocket, execute them, and acknowledge.

    Each incoming message is a Python dict literal with a 'command' key
    (grabber commands also carry a 'value' key).  An acknowledgement dict
    is sent back as JSON after every message.
    """
    while True:
        message = await websocket.recv()
        # literal_eval only parses Python literals and cannot execute code,
        # so it is safe on untrusted input (unlike eval).
        message = literal_eval(message)
        print(message)
        command = message.get('command')
        if command == 'moveForward':
            robot.forward()
            answer = {'action': 'moveForward'}
        elif command == 'moveBackward':
            robot.backward()
            answer = {'action': 'moveBackward'}
        elif command == 'turnRight':
            robot.right()
            answer = {'action': 'turnRight'}
        elif command == 'turnLeft':
            robot.left()
            answer = {'action': 'turnLeft'}
        elif command == 'stop':
            robot.stop()
            answer = {'action': 'stop'}
        elif command == 'grabberGrab':
            # grabber action grab
            answer = {'action': 'grabberGrab', 'value': message['value']}
        elif command == 'grabberTilt':
            # grabber action tilt
            answer = {'action': 'grabberTilt', 'value': message['value']}
        elif command == 'grabberHeight':
            # grabber action height
            answer = {'action': 'grabberHeight', 'value': message['value']}
        else:
            # BUG FIX: an unknown command previously left `answer` unbound
            # (NameError on the first message) or silently re-sent the
            # previous acknowledgement.
            answer = {'action': 'unknown', 'command': command}
        await websocket.send(json.dumps(answer))
def control_robot():
    """Flask handler: drive the robot assigned to this user, or assign one.

    Reads direction/mousedown/user_age/user_id from the query string.
    If the user already owns a robot, apply the motion command; if the
    user was just displaced, signal endgame; otherwise reassign the robot
    whose user has been idle longest.
    """
    global ts, old_id
    ts = time.time()
    direction = request.args.get('direction', 0)
    mousedown = request.args.get('mousedown', 0)
    user_age = int(request.args.get('user_age', 0))
    user_id = request.args.get('user_id', 0)
    for robot in ROBOTS:
        if user_id == robot.user_id:
            if mousedown == '1':
                print('button pressed:', direction)
                if direction == 'up':
                    robot.forward()
                elif direction == 'left':
                    robot.left()
                elif direction == 'right':
                    robot.right()
                else:
                    robot.reverse()
            else:
                robot.stop()
            return jsonify(color=robot.color, endgame=0)
        elif user_id == old_id:
            # This user was displaced by a newer user: tell them game over.
            return jsonify(color=robot.color, endgame=1)
    # if above didn't return, new user: take over the longest-idle robot
    oldest_robot = max(ROBOTS, key=lambda x: x.age())
    old_id = oldest_robot.user_id
    oldest_robot.user_id = user_id
    oldest_robot.user_age = user_age
    oldest_robot.user_time = time.time()
    # BUG FIX: previously used the for-loop leak variable `robot` here
    # (the last robot in ROBOTS, or a NameError when ROBOTS is empty)
    # instead of the robot actually assigned to the new user.
    print("new user: {}, {}, assigned {}".format(user_id, user_age,
                                                 oldest_robot.color))
    return jsonify(color=oldest_robot.color, endgame=0)
def follow_path(pos, path, grid):
    """Drive through every cell of `path` in order.

    Aborts (after stopping the robot) as soon as manual mode is engaged.
    Returns the final position reached.
    """
    for waypoint in path:
        pos = move_to(pos, waypoint)
        if mode == robot.MANUAL:
            robot.stop()
            break
    return pos
def stop():
    """Halt the robot and the server, logging (not propagating) any failure."""
    try:
        toolkit.verbose("Halting the system.")
        robot.stop()
        server.stop()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed during shutdown.
        toolkit.verbose("Uh-oh. An error occured when halting.")
def check_robot(interval):
    """Watchdog tick: re-arm the timer, then stop any robot whose time is up.

    Re-schedules itself every `interval` seconds via threading.Timer.
    """
    global ts, ROBOTS
    Timer(interval, check_robot, [interval]).start()
    for bot in ROBOTS:
        if bot.is_time_exceeded():
            bot.stop()
def get_picture(script_path='/root/takepic.py', image_path='/root/temp.png'):
    """Take a single up-to-date picture and return it as an OpenCV image.

    Stops the robot first so the frame is not motion-blurred, shells out
    to the capture script, then reads the written file back from disk.

    Generalized: the capture script and image-file locations are now
    parameters; the defaults preserve the original behavior.
    """
    robot.stop()
    time.sleep(0.35)  # maybe the camera is taking blurry pictures
    os.system('python ' + script_path)
    time.sleep(0.60)  # give the capture script time to write the file
    pic = cv2.imread(image_path)
    time.sleep(0.35)
    return pic
def robot_code():
    """Drive a fixed demo route (forward/turn sequence), then stop."""
    route = (
        (robot.forward, 20),
        (robot.turn_right, 90),
        (robot.forward, 40),
        (robot.turn_left, 90),
        (robot.forward, 40),
        (robot.turn_right, 90),
        (robot.backwards, 40),
    )
    for motion, amount in route:
        motion(amount)
    robot.stop()
def execute(img):
    """Run object detection on a frame, steer toward the nearest class-53
    detection, and return the annotated frame as JPEG bytes.

    Uses module globals: width/height (frame scale factors), isObject and
    isMoving (state flags), start (timestamp for the 4s lost-target stop
    and the 2s movement window).
    """
    global width
    global height
    global isObject
    global isMoving
    global start
    image = cv2.resize(img, (300,300))
    # compute all detected objects
    detections = model(image)
    # draw all detections on image
    for det in detections[0]:
        bbox = det['bbox']
        text = str(category_map[det['label']])
        # bbox coordinates appear normalized to [0,1]; scaled by width/height here
        cv2.rectangle(image, (int(width * bbox[0]), int(height * bbox[1])), (int(width * bbox[2]), int(height * bbox[3])), (255, 0, 0), 2)
        cv2.putText(image, text, (int(width * bbox[0]), int(height * bbox[3])), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 0, 255), lineType=cv2.LINE_AA)
    # select detections that match selected class label (53, per category_map)
    matching_detections = [d for d in detections[0] if d['label'] == 53]
    # get detection closest to center of field of view and draw it
    det = closest_detection(matching_detections)
    if det is not None:
        bbox = det['bbox']
        cv2.rectangle(image, (int(width * bbox[0]), int(height * bbox[1])), (int(width * bbox[2]), int(height * bbox[3])), (0, 255, 0), 5)
    if det is None:
        # Target lost: start a timer on the first lost frame, stop the
        # robot once the target has been gone for more than 4 seconds.
        # NOTE(review): isObject is never set True in this function —
        # presumably toggled elsewhere in the module; confirm.
        if isObject == True:
            start = time.time()
            isObject = False
        if (time.time() - start) > 4:
            robot.stop()
    # otherwise steer towards target
    else:
        if isMoving == False:
            start = time.time()
            isMoving = True
        center = detection_center(det)
        print(center)
        # move robot forward and steer proportional target's x-distance from center
        if center[0] > 0.2:
            print('left')
            robot.left()
        elif center[0] < -0.2:
            print('right')
            robot.right()
        elif center[0] > -0.2 and center[0] < 0.2:
            print('forward')
            robot.forward()
        # re-allow a fresh movement window after 2 seconds
        if (time.time() - start) > 2:
            isMoving = False
    return cv2.imencode('.jpg', image)[1].tobytes()
def _end(self):
    # End-of-spin hook: chains to the base RobotAction cleanup, then — if a
    # rotation target was set — stops the motors and samples the IMU to
    # report how many degrees the gyro actually measured.
    print("spin end")
    RobotAction._end(self)
    if (self.degree_target > 0):
        robot.stop()
        # Pump the IMU loop a fixed number of times so the reading settles
        # before sampling.  NOTE(review): 50 iterations looks empirical —
        # confirm against the IMU driver's update rate.
        for i in range(0,50):
            robot.imu_loop()
        self.degrees = robot.imu_degrees()
        robot.imu_stop()
        print("gyro change=")
        print(self.degrees)
def explore(pos, grid):
    """Greedily walk unvisited neighbours, marking each cell VISITED.

    Stops the robot and returns the current position either when no
    unvisited neighbour remains or when manual mode interrupts the run.
    """
    global rows, cols, scale, discovered
    stderr.write("search area\n")
    grid[pos[0]][pos[1]] = VISITED
    while True:
        nxt = get_neighbour(pos, grid)
        if nxt is None:
            break
        pos = move_to(pos, nxt)
        grid[pos[0]][pos[1]] = VISITED
        if mode == robot.MANUAL:
            robot.stop()
            return pos
    robot.stop()
    return pos
def action(client, userdata, message):
    """MQTT callback: decode the JSON payload and dispatch its 'action'.

    Motion/servo/light commands map directly onto robot methods; the
    'voltage' and 'distance' queries additionally publish the reading
    back on the 'from_robot' topic.  Unknown actions are ignored.
    """
    data = json.loads(message.payload.decode())['action']
    print(data)
    handlers = {
        'forward': robot.forward,
        'stop': robot.stop,
        'left': robot.left,
        'right': robot.right,
        'backward': robot.backward,
        'servo left': robot.servo_left,
        'servo center': robot.servo_center,
        'servo right': robot.servo_right,
        'lights': robot.lights,
        'blinkers': robot.blinkers,
    }
    if data in handlers:
        print(data)
        handlers[data]()
    elif data == 'voltage':
        print('voltage')
        voltage = send_json(robot.voltage())
        myMQTT.publish('from_robot', voltage, 0)
    elif data == 'distance':
        print('distance')
        distance = send_json(robot.distance())
        myMQTT.publish('from_robot', distance, 0)
def reachable(pos, grid):
    """BFS from `pos` to the closest UNKNOWN cell and drive there.

    Returns the new position after following the path, or None when no
    unvisited cell remains reachable.
    """
    stderr.write("check reachability\n")
    start = tuple(pos[0:2])
    frontier = deque([start])
    seen = {start}
    previous = {}
    goal = None
    # breadth-first search for the nearest unvisited grid cell
    while frontier:
        cur = frontier.popleft()
        if grid[cur[0]][cur[1]] == UNKNOWN:
            goal = cur
            break
        r, c = cur
        # neighbour order matters for tie-breaking: E, W, S, N
        for nbr in ((r, c + 1), (r, c - 1), (r + 1, c), (r - 1, c)):
            if in_grid(nbr) and grid[nbr[0]][nbr[1]] != OBSTACLE and nbr not in seen:
                frontier.append(nbr)
                previous[nbr] = cur
                seen.add(nbr)
    # no unvisited cells left
    if goal is None:
        return None
    # reconstruct the path (goal -> start), then follow it forward
    path = []
    while goal in previous:
        path.append(goal)
        goal = previous[goal]
    pos = follow_path(pos, path[::-1], grid)
    robot.stop()
    return pos
def move_div():
    """Flask handler: map the pressed form button to a robot motion.

    Unrecognized buttons are ignored; the index page is always re-rendered.
    """
    #if flask.request.method == 'POST':
    pressed = flask.request.form['button']
    moves = {
        'forward': ('forward!', robot.forward),
        'stop': ('stop!', robot.stop),
        'left': ('left', robot.left),
        'right': ('right', robot.right),
        'backward': ('backward', robot.backward),
    }
    if pressed in moves:
        text, act = moves[pressed]
        print(text)
        act()
    return flask.render_template('index.html')
def main():
    """Main robot loop: announce startup, drive forward, and react to
    touch/sight sensors until a dot is detected (search mode)."""
    # variables
    speed = 800
    # UltraSonicSensor distance
    # Hammer swing strength
    # NOTE(review): swing_down/swing_up/spin_speed are defined but never
    # used in this function — possibly consumed by code outside this view,
    # or dead config; confirm.
    swing_down = 700
    swing_up = 250
    # Spinner speed
    spin_speed = 300
    robot.speak('I am alive')
    robot.stop()
    robot.wheels.move_forward(speed, speed)
    while True:
        #search mode: keep reacting to sensors until a dot is seen
        while(not robot.hasDot()):
            if(robot.hasTouch()):
                robot.speak('Collision detected')
                robot.stop()
            if(robot.hasSight()):
                robot.speak('I see you')
                # back away, then stop
                robot.wheels.move_backward(speed, speed, 10)
                robot.stop()
def automatic_mode(robot_pos, init_dir, to_search, sc):
    """Autonomously explore `to_search` until nothing reachable remains,
    or hand off to manual mode when the operator interrupts.

    Initializes the module-level search state (grid size, scale, heading,
    previous position), then alternates explore()/reachable() passes.
    Returns the number of discovered cells (module global `discovered`).
    """
    global scale, rows, cols, discovered, vdir, prev_pos, mode
    discovered = 0
    rows = len(to_search)
    cols = len(to_search[0])
    scale = sc
    vdir = init_dir
    prev_pos = robot_pos
    # do the search
    robot_pos = reachable(robot_pos, to_search)
    while robot_pos is not None:
        if mode == robot.MANUAL:
            # NOTE(review): `robot_id` is not defined in this scope — it is
            # either a module global not visible here, or a bug; confirm.
            manual_mode(robot_id, robot_pos, vdir)
            mode = robot.AUTO
            break
        else:
            robot_pos = explore(robot_pos, to_search)
            robot_pos = reachable(robot_pos, to_search)
    robot.stop()
    print("DONE")
    return discovered
def go(): #This is the main function
    """Ball-seeking state machine (Python 2).

    Cycles between 'seek' (spin until the ball is visible) and 'turn'
    (center the ball, then approach and pick it up).  Turning power and
    forward-drive time are adapted each frame from how much the ball's
    position changed relative to how far it still has to go.

    NOTE(review): `picnum`, `instance` and `t0` are used in the LOG branch
    without being defined here — presumably module globals; confirm.
    NOTE(review): the `while True` loop has no break, so the trailing
    robot.stop_all()/destroyAllWindows()/return are unreachable.
    """
    ball_was_found = False #Used to check if the last movement caused the ball to be lost
    camera_is_reading = False # used to know if camera is already reading
    phase = 'turn' #starting phase should be seek when done
    last_phase = phase
    seek_direction = 0 # 0 for right, 1 for left. Will alternate
    counter = 0
    if LOG: log_init()
    #Experimental variables:
    old_x = -999 # not used when doing angle instead of X coordinate
    old_y = -999
    old_angle = -999
    good_angle = False
    #use power adjustment for turning to center
    #use time adjustment for movement forward
    power_high = 1.00
    power_low = 0.40
    time_high = 1.00
    time_low = 0.40
    power_percent = 0.75 # start high (original code used 100% duty PWM for turning)
    time_percent = time_low # start small
    robot.arm_highest()
    robot.claw_open()
    while True:
        counter += 1
        log = 'Second is ' + str(counter) + '\n'
        log += 'Start Phase: ' + str(phase) + '\n'
        # Grab a frame: streaming camera while seeking, single shots otherwise
        if phase == 'seek':
            if (camera_is_reading == False):
                camera_is_reading = True
                camera = camera_setup()
            orig = get_picture_self(
                camera)  # read from camera for real time with 2 second lag
        else:
            if (camera_is_reading == True):
                camera_is_reading = False
                camera.release()
            orig = get_picture()  # get 1 picture for up to date images
        center_of_mass, size, ratio, notorig = where_dat_ball(orig)
        angle = get_angle_from_com(center_of_mass)
        if (old_x == -999 or old_y == -999 or old_angle == -999):
            #this should only happen once
            old_x = center_of_mass[1]
            old_y = center_of_mass[0]
            old_angle = angle
        #d meaning delta = change in the value
        dx = old_x - center_of_mass[1]
        dy = old_y - center_of_mass[0]
        da = old_angle - angle
        old_y = center_of_mass[0]
        old_x = center_of_mass[1]
        #calculate changing amount of time/power
        if (abs(angle) > CLAW_ANGLE):
            #we're trying to fix angle
            if (good_angle == True):
                # the angle is now bad, so reset the percents
                time_percent = time_low
                power_percent = power_high
                good_angle = False
            # overshot relative to remaining offset -> less power;
            # clearly undershot -> more power
            if (abs(dx) > abs(center_of_mass[1] - MID_X)):
                power_percent -= 0.20
            else:
                if (abs(dx) < abs(center_of_mass[1] - MID_X) * 0.9):
                    power_percent += 0.10
        else:
            #we're trying to fix distance
            if (good_angle == False):
                # the angle is now good, so reset the percents
                time_percent = time_low
                power_percent = power_high
                good_angle = True
            if (abs(dy) < abs(center_of_mass[0] - CLAW_DISTANCE) * 0.8):
                time_percent += 0.30
            else:
                if (abs(dy) > abs(center_of_mass[0] - CLAW_DISTANCE)):
                    time_percent -= 0.20
        #percents should always be between 0 and 1, and within the HIGH and LOW constants set above
        power_percent = min(max(power_percent, power_low), power_high)
        time_percent = min(max(time_percent, time_low), time_high)
        print "dx = ", dx, ", dy = ", dy, ", da = ", da
        print "pp = ", power_percent, ", tp = ", time_percent
        print('CenterOfMass = ' + str(center_of_mass) + '\n')
        print "angle = ", angle
        print('Size = ' + str(size) + '\n')
        log += 'center_of_mass = %.2f , %.2f' % (center_of_mass[0], center_of_mass[1]) + '\n'
        log += 'size = ' + str(size) + '\n'
        if (ball_was_found == True and size == 0):
            #we lost the ball after moving.
            ball_was_found = False
            if (last_phase == 'seek'):
                #Assume went too far right (or left)
                print "Size is 0 after ball was found. Assuming that we turned too far"
                turn_time = 1.0  # Assume angle is somewhere to the left/right
                seek_direction = not seek_direction
                robot.spinfortime(turn_time, 70 * POWER, seek_direction)
            elif (last_phase == 'turn'):
                #Assume we moved too far forward.
                time_percent = time_low
                print "Size is 0 after ball was found, assuming that we went too far forward"
                #show_picture() #!! DEBUG ONLY
                robot.timed_backward(.09 * time_percent, 65 * POWER)
            # re-sample after the corrective move
            orig = get_picture()
            center_of_mass, size, ratio, notorig = where_dat_ball(orig)
        last_phase = phase
        if (size > 800):
            # Sanity check: blob this large means detection went wrong
            print "SOMETHING WENT WRONG"
            print "The size is too big. Angle is probably big too."
            #print "Exiting to stop out of control robot"
            robot.stop()
            if (camera_is_reading):
                camera_is_reading = False
                camera.release()
            #show_picture() #!! DEBUG ONLY
            #exit()
        # Do the appropriate action based on what phase we are in
        if phase == 'seek':
            if size == 0:
                print('Did not Detect Ball. Spinning')
                print "seek_direction = ", seek_direction
                log += 'Did not find ball, spinning.\n'
                robot.spin(50 * POWER, seek_direction)
                #robot.spinfortime(2.5,50,seek_direction)
                #time.sleep(0.25)
            else:
                phase = 'turn'
                robot.stop()
                #camera.release()
                #del camera
                #time.sleep(3)
                log += 'Found Object, Centering.\n'
                print('Found Object, Moving to Turn Phase')
        if phase == 'turn':
            if size == 0:
                ball_was_found = False
                phase = 'seek'
                s = 'Ball Lost, Seeking'
                log += s
                print(s)
                continue
            ball_was_found = True
            if abs(angle) < CLAW_ANGLE:
                # phase = 'move'
                robot.arm_highest()
                s = 'Angle Good. Going to Move Phase'
                print(s)
                log += s
                if center_of_mass[0] > CLAW_DISTANCE:
                    # Close enough: grab, spin away, release
                    robot.pickup()
                    robot.spinfortime(.6, 50 * POWER, True)
                    #robot.timed_forward(.3, 45)
                    robot.release()
                    robot.arm_highest()
                    #Done:
                    cv2.destroyAllWindows()
                    print "\n\nJob Complete. Sleeping for 10 seconds. Press Ctrl+Z to quit.\n"
                    time.sleep(10)
                    # exit()
                else:
                    robot.timed_forward(time_percent * 0.10, power_percent * 100 * POWER)
            else:
                turn_left = angle < 0
                turn_time = np.float32(estimate_turn_time(angle))
                print('Turning: ')
                print(' Turn Angle:' + str(angle))
                print(' Turn Time: ' + str(turn_time))
                robot.spinfortime(turn_time, 100 * power_percent * POWER, turn_left)
        if LOG:
            # Save an annotated snapshot plus the per-iteration text log
            log += 'End Phase: ' + str(phase)
            #if counter % 2 != 0:
            #    continue
            picnum += 1
            zerostring = '000'
            if picnum > 9: zerostring = '00'
            if picnum > 99: zerostring = '0'
            if picnum > 999: zerostring = ''
            filename = 'Instance_' + str(
                instance) + '_Pic_' + zerostring + str(picnum) + '.png'
            fullname = os.path.join(DIRECTORY, filename)
            fig = plt.figure(1)
            frame = plt.subplot2grid((1, 2), (0, 0))
            frame.imshow(orig)
            plt.text(center_of_mass[1], center_of_mass[0], '+', fontsize=60, color='green', verticalalignment='center', horizontalalignment='center')
            frame = plt.subplot2grid((1, 2), (0, 1))
            frame.text(0.1, 0.9, log, transform=frame.transAxes, va='top', ha='left', fontsize=8)
            frame.axes.get_xaxis().set_visible(False)
            frame.axes.get_yaxis().set_visible(False)
            frame.patch.set_visible(False)
            frame.axis('off')
            print 'Saving Log...'
            plt.savefig(fullname)
            print('Log Save Time: ' + str(time.time() - t0) + ' seconds.')
            # End LOG
    # end While
    robot.stop_all()
    cv2.destroyAllWindows()
    return None
def run():
    """ Perform experiments: setups, executions, save results.

    Requires Python 3.  Runs exp.N_REPETITIONS repetitions of
    exp.N_EPISODES learning episodes (either against a saved model or a
    live robot connection), collects average-reward and step-time
    statistics, and writes plots/arrays/logs via the `save` module.
    """
    import sys
    if sys.version_info[0] < 3:
        sys.exit("Sorry, Python 3 required")
    exp.check()  # Check experiment parameters
    # copy the selected taskfile to speed up the execution:
    copyfile("tasks/" + exp.TASK_ID + ".py", "task.py")
    # deferred imports: task.py must exist (copied just above) before import
    import task
    import robot
    import lp
    import show
    import save
    task.setup()
    caption = (exp.TASK_ID + "_" + exp.ALGORITHM + "_" + exp.ACTION_STRATEGY)
    if exp.SUFFIX:
        caption += "_" + exp.SUFFIX
    save.new_dir(results_path, caption)  # create result directory
    epi = 0
    # Average Reward per step (aveR):
    ave_r = np.zeros((exp.N_REPETITIONS, exp.N_STEPS))
    # Mean(aveR) of all tests per step
    mean_ave_r = np.zeros(exp.N_STEPS)
    # AveR per episode
    epi_ave_r = np.zeros([exp.N_REPETITIONS, exp.N_EPISODES])
    # actual step time per episode (for computational cost only)
    actual_step_time = np.zeros(exp.N_REPETITIONS)
    if exp.LEARN_FROM_MODEL:
        import model
        file_model = tasks_path + exp.FILE_MODEL + "/" + exp.FILE_MODEL
        model.load(file_model, exp.N_EPISODES_MODEL)
    else:
        robot.connect()  # Connect to V-REP / ROS
    if exp.CONTINUE_PREVIOUS_EXP:
        prev_exp = __import__(exp.PREVIOUS_EXP_FILE)
        print("NOTE: Continue experiments from: " + exp.PREVIOUS_EXP_FILE)
        time.sleep(3)
    # Experiment repetition loop ------------------------------------------
    for rep in range(exp.N_REPETITIONS):
        if exp.CONTINUE_PREVIOUS_EXP:
            last_q, last_v = prev_exp.q, prev_exp.v
            last_policy, last_q_count = prev_exp.policy, prev_exp.q_count
        else:
            last_q = last_v = last_policy = last_q_count = None
        # Episode loop ------------------
        for epi in range(exp.N_EPISODES):
            if exp.LEARN_FROM_MODEL:
                print("Learning from Model")
                task.STEP_TIME = 0
                lp.step_time = 0
            else:
                robot.start()
            show.process_count(caption, rep, epi, exp.EPISODIC)
            lp.setup()  # Learning process setup
            # carry learned state across episodes / from a previous experiment
            if (exp.EPISODIC and epi > 0) or exp.CONTINUE_PREVIOUS_EXP:
                lp.q, lp.v = last_q, last_v
                lp.policy, lp.count = last_policy, last_q_count
            lp.run()  # Execute the learning process
            if not exp.LEARN_FROM_MODEL:
                robot.stop()
            ave_r[rep] = lp.ave_r_step
            actual_step_time[rep] = lp.actual_step_time
            if exp.EPISODIC:
                last_q, last_v = lp.q, lp.v
                last_policy, last_q_count = lp.policy, lp.q_count
                epi_ave_r[rep, epi] = lp.ave_r_step[lp.step]
            if exp.EXPORT_SASR_step:
                save.simple(lp.sasr_step, "SASR_step")
        # end of episode
        show.process_remaining(rep, epi)
        mean_ave_r = np.mean(ave_r, axis=0)
    # End of experiment repetition loop ----------------------------
    # Mean of AveR per step (last episode)
    save.plot_mean(mean_ave_r, epi)
    save.simple(ave_r, "aveR")
    # If EPISODIC: Save ave_r of last episode
    if exp.EPISODIC:
        # Mean of AveR reached (last step) per episode
        mean_epi_ave_r = np.mean(epi_ave_r, axis=0)
        save.plot_mean(mean_epi_ave_r, "ALL")
        save.simple(epi_ave_r, "EPI")
    final_r = mean_ave_r[lp.step]
    final_actual_step_time = np.mean(actual_step_time)
    save.log(final_r, final_actual_step_time)
    save.arrays()
    print("Mean average Reward = %0.2f" % final_r, "\n")
    print("Mean actual step time (s): %0.6f" % final_actual_step_time, "\n")
    if not exp.LEARN_FROM_MODEL:
        robot.disconnect()
def do_POST(self):
    # HTTP POST handler (Python 2): each named form button toggles the
    # corresponding motion.  The globals f/b/l/r hold the on/off state of
    # forward/backward/left/right: first press starts the motion, second
    # press stops it.  Always responds with the contents of index.html.
    global f
    global b
    global r
    global l
    global robot
    self._set_headers()
    form = cgi.FieldStorage(fp=self.rfile, headers=self.headers, environ={'REQUEST_METHOD': 'POST'})
    print form.getvalue("forward")
    print form.getvalue("backward")
    print form.getvalue("left")
    print form.getvalue("right")
    if form.getvalue("forward") == "Forward":
        if f == 0:
            # stop any current motion before starting the new one
            robot.stop()
            robot.forward()
            f = 1
        else:
            robot.stop()
            f = 0
    if form.getvalue("backward") == "Backward":
        if b == 0:
            robot.stop()
            robot.backward()
            b = 1
        else:
            robot.stop()
            b = 0
    if form.getvalue("left") == "Left":
        if l == 0:
            robot.stop()
            robot.left()
            l = 1
        else:
            robot.stop()
            l = 0
    if form.getvalue("right") == "Right":
        if r == 0:
            robot.stop()
            robot.right()
            r = 1
        else:
            robot.stop()
            r = 0
    # serve the control page back to the browser
    file = open("index.html", "r")
    page = file.read()
    file.close()
    self.wfile.write(page)
def run_line_following(robot):
    """Line-following mode: follow a line seen in the lower half of the
    camera frame, while recognizing STOP and U-TURN signs in the upper half.

    Loops until control.main_mode leaves MODE.FOLLOW_LINE.  Each frame is
    split at y=170: the upper part is shape-matched against reference
    stop/u-turn contours, the lower part provides the line's center of
    mass, which drives small left/forward/right step corrections.
    """
    global camera_image_mutex, camera_image, patched_image
    log(INFO, 'Starting line following mode')
    # Deactivate the use of PID as it provides better results
    # with the little steps with which we use this mode
    robot.use_pid(False)
    # ################################ #
    # Read reference stop and u-turn   #
    # images and shapes, for later use #
    # ################################ #
    stop_image   = cv2.imread('stop.png'  , 0)
    u_turn_image = cv2.imread('u-turn.png', 0)
    return_value, stop_thresholded   = cv2.threshold(stop_image  , 127, 255, 0)
    return_value, u_turn_thresholded = cv2.threshold(u_turn_image, 127, 255, 0)
    stop_contours  , hierarchy = cv2.findContours(stop_thresholded  , cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    u_turn_contours, hierarchy = cv2.findContours(u_turn_thresholded, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)
    stop_contour   = stop_contours [0]
    u_turn_contour = u_turn_contours[0]
    was_center_on_the_left = None
    while control.main_mode == MODE.FOLLOW_LINE:
        # Deal with startup, when no camera image is ready yet
        if camera_image is None:
            continue
        camera_image_mutex.acquire()
        patched_image_mutex.acquire()
        patched_image = camera_image.copy()
        camera_image_mutex.release()
        # Turn image to gray, separate lower and upper parts, apply threshold & get best shape
        lower_part_shape, thresholded_image = get_best_shape(patched_image, (0, 0), (367, 169), control.line_threshold)
        upper_part_shape, thresholded_image = get_best_shape(patched_image, (0, 170), (367, 239), control.line_threshold)
        # Draw a line to show separation between lower and upper parts
        cv2.line(patched_image, (0, 170), (367, 170), (255, 255, 255), 1)
        # ############################# #
        # Deal with upper part of image #
        # ############################# #
        got_a_stop_match   = False
        got_a_u_turn_match = False
        stop_draw_color   = (255, 255, 255)
        u_turn_draw_color = (255, 255, 255)
        sign_draw_color   = (255, 255, 255)
        if upper_part_shape is None:
            log(DEBUG, 'Found no shape in upper part')
        else:
            # Get rotated rectangle of upper shape
            rectangle = cv2.minAreaRect(upper_part_shape)
            box = cv2.boxPoints(rectangle)
            box = numpy.int0(box)
            # Compute match of that shape against stop sign
            stop_match_value = cv2.matchShapes(upper_part_shape, stop_contour, cv2.CONTOURS_MATCH_I1, 0)
            # Compute match of that shape against u-turn sign
            u_turn_match_value = cv2.matchShapes(upper_part_shape, u_turn_contour, cv2.CONTOURS_MATCH_I1, 0)
            # Deal with upper part shape against stop sign
            # (match thresholds 0.50 / 0.75 look empirical — lower is better)
            if stop_match_value < 0.50:
                log(DEBUG, 'Found STOP sign')
                got_a_stop_match = True
                stop_draw_color = (0, 255, 0)
                sign_draw_color = (0, 255, 0)
            # Deal with upper part shape against u-turn sign
            if u_turn_match_value < 0.75:
                log(DEBUG, 'Found U-TURN sign')
                got_a_u_turn_match = True
                u_turn_draw_color = (0, 255, 0)
                sign_draw_color = (0, 255, 0)
            cv2.putText(patched_image, 'Stop @ {:04.2f}'.format (stop_match_value ), (10, 20), cv2.FONT_HERSHEY_PLAIN, 1, stop_draw_color , 1, cv2.FILLED)
            cv2.putText(patched_image, 'U-turn @ {:04.2f}'.format(u_turn_match_value), (10, 40), cv2.FONT_HERSHEY_PLAIN, 1, u_turn_draw_color, 1, cv2.FILLED)
            # Draw rotated rectangle of upper shape
            patched_image = cv2.drawContours(patched_image, [box], 0, sign_draw_color, 2)
        # ############################# #
        # Deal with lower part of image #
        # ############################# #
        # Ignore a malformed shape; consider it as if no shape was found
        if lower_part_shape is not None and len(lower_part_shape) < 5:
            log(DEBUG, 'Found malformed shape in lower part')
            lower_part_shape = None
        if lower_part_shape is None:
            log(DEBUG, 'Found no actual shape in lower part')
            patched_image_mutex.release()
            robot.stop()
            # Deal with upper part shape against stop sign
            if got_a_stop_match == True:
                log(DEBUG, 'Executing STOP sign')
                # No need to stop robot as it's already stopped
                was_center_on_the_left = None
            # Deal with upper part shape against u-turn sign
            elif got_a_u_turn_match == True:
                log(DEBUG, 'Executing U-TURN sign')
                robot.left_with_angle(180, True)
                robot.stop_for_period(1.0)
                # after a 180, left/right are mirrored
                was_center_on_the_left = not(was_center_on_the_left)
            # Try and get back on the track; this first test deals with startup
            elif was_center_on_the_left is None:
                pass
            elif was_center_on_the_left == True:
                log(DEBUG, 'Last known position of line was on the left: correcting to the left')
                robot.left_step()
                robot.stop_for_period(0.2)
            else:
                log(DEBUG, 'Last known position of line was on the right: correcting to the right')
                robot.right_step()
                robot.stop_for_period(0.2)
        else:
            log(DEBUG, 'Found an actual shape in lower part')
            # Get rotated rectangle of lower shape
            rectangle = cv2.minAreaRect(lower_part_shape)
            box = cv2.boxPoints(rectangle)
            box = numpy.int0(box)
            # Get the center of lower shape
            M = cv2.moments(lower_part_shape)
            center_x = int(M["m10"] / M["m00"])
            center_y = int(M["m01"] / M["m00"])
            # 183 is (roughly) the horizontal midpoint of the 367-px frame
            if center_x < 183:
                was_center_on_the_left = True
            else:
                was_center_on_the_left = False
            if 113 < center_x < 253:
                center_draw_color = (0, 255, 0)
            else:
                center_draw_color = (0, 0, 255)
            # Draw rotated rectangle and center of lower shape
            patched_image = cv2.drawContours(patched_image, [box], 0, center_draw_color, 2)
            cv2.circle (patched_image, (center_x, center_y), 5, center_draw_color, -1)
            cv2.putText(patched_image, 'Center: {:03}'.format(center_x), (250, 230), cv2.FONT_HERSHEY_PLAIN, 1, center_draw_color, 1, cv2.FILLED)
            patched_image_mutex.release()
            # Implement line following based on lower part lateral position
            if 113 < center_x < 253:
                log(DEBUG, 'Correctly aligned with line: moving on')
                robot.forward_step()
                robot.stop_for_period(0.2)
            elif center_x < 113:
                log(DEBUG, 'Going away from line: correcting to the left')
                robot.left_step()
                robot.stop_for_period(0.2)
            else:
                log(DEBUG, 'Going away from line: correcting to the right')
                robot.right_step()
                robot.stop_for_period(0.2)
    robot.stop()
    # Restore the use of PID for other modes
    robot.use_pid(True)
def go(): #This is the main function
    """Ball-seeking state machine (Python 2) — near-duplicate implementation.

    Cycles between 'seek' (spin until the ball is visible) and 'turn'
    (center the ball, then approach and pick it up).  Turning power and
    forward-drive time are adapted each frame from how much the ball's
    position changed relative to how far it still has to go.

    NOTE(review): `picnum`, `instance` and `t0` are used in the LOG branch
    without being defined here — presumably module globals; confirm.
    NOTE(review): the `while True` loop has no break, so the trailing
    robot.stop_all()/destroyAllWindows()/return are unreachable.
    """
    ball_was_found = False #Used to check if the last movement caused the ball to be lost
    camera_is_reading = False # used to know if camera is already reading
    phase = 'turn' #starting phase should be seek when done
    last_phase = phase
    seek_direction = 0 # 0 for right, 1 for left. Will alternate
    counter = 0
    if LOG: log_init()
    #Experimental variables:
    old_x = -999 # not used when doing angle instead of X coordinate
    old_y = -999
    old_angle = -999
    good_angle = False
    #use power adjustment for turning to center
    #use time adjustment for movement forward
    power_high = 1.00
    power_low = 0.40
    time_high = 1.00
    time_low = 0.40
    power_percent = 0.75 # start high (original code used 100% duty PWM for turning)
    time_percent = time_low # start small
    robot.arm_highest()
    robot.claw_open()
    while True:
        counter += 1
        log = 'Second is ' + str(counter) + '\n'
        log += 'Start Phase: ' + str(phase) + '\n'
        # Grab a frame: streaming camera while seeking, single shots otherwise
        if phase == 'seek':
            if(camera_is_reading == False):
                camera_is_reading = True
                camera = camera_setup()
            orig = get_picture_self(camera) # read from camera for real time with 2 second lag
        else:
            if (camera_is_reading == True):
                camera_is_reading = False
                camera.release()
            orig = get_picture() # get 1 picture for up to date images
        center_of_mass, size, ratio, notorig = where_dat_ball(orig)
        angle = get_angle_from_com(center_of_mass)
        if(old_x == -999 or old_y == -999 or old_angle == -999):
            #this should only happen once
            old_x = center_of_mass[1]
            old_y = center_of_mass[0]
            old_angle = angle
        #d meaning delta = change in the value
        dx = old_x - center_of_mass[1]
        dy = old_y - center_of_mass[0]
        da = old_angle - angle
        old_y = center_of_mass[0]
        old_x = center_of_mass[1]
        #calculate changing amount of time/power
        if( abs(angle) > CLAW_ANGLE ):
            #we're trying to fix angle
            if (good_angle == True):
                # the angle is now bad, so reset the percents
                time_percent = time_low
                power_percent = power_high
                good_angle = False
            # overshot relative to remaining offset -> less power;
            # clearly undershot -> more power
            if (abs(dx) > abs(center_of_mass[1]-MID_X)):
                power_percent -= 0.20
            else:
                if (abs(dx) < abs(center_of_mass[1]-MID_X)*0.9):
                    power_percent += 0.10
        else:
            #we're trying to fix distance
            if(good_angle == False):
                # the angle is now good, so reset the percents
                time_percent = time_low
                power_percent = power_high
                good_angle = True
            if (abs(dy) < abs(center_of_mass[0] - CLAW_DISTANCE)*0.8):
                time_percent += 0.30
            else:
                if (abs(dy) > abs(center_of_mass[0] - CLAW_DISTANCE)):
                    time_percent -= 0.20
        #percents should always be between 0 and 1, and within the HIGH and LOW constants set above
        power_percent = min(max(power_percent, power_low), power_high)
        time_percent = min(max(time_percent, time_low), time_high)
        print "dx = ", dx, ", dy = ", dy, ", da = ", da
        print "pp = ", power_percent, ", tp = ", time_percent
        print('CenterOfMass = '+ str(center_of_mass) + '\n')
        print "angle = ",angle
        print('Size = ' + str(size) + '\n')
        log += 'center_of_mass = %.2f , %.2f'%(center_of_mass[0],center_of_mass[1]) + '\n'
        log += 'size = ' + str(size) + '\n'
        if(ball_was_found == True and size == 0):
            #we lost the ball after moving.
            ball_was_found = False
            if(last_phase == 'seek'):
                #Assume went too far right (or left)
                print "Size is 0 after ball was found. Assuming that we turned too far"
                turn_time = 1.0 # Assume angle is somewhere to the left/right
                seek_direction = not seek_direction
                robot.spinfortime(turn_time,70*POWER, seek_direction)
            elif (last_phase == 'turn'):
                #Assume we moved too far forward.
                time_percent = time_low
                print "Size is 0 after ball was found, assuming that we went too far forward"
                #show_picture() #!! DEBUG ONLY
                robot.timed_backward(.09*time_percent,65*POWER)
            # re-sample after the corrective move
            orig = get_picture()
            center_of_mass, size, ratio, notorig = where_dat_ball(orig)
        last_phase = phase
        if(size > 800):
            # Sanity check: blob this large means detection went wrong
            print "SOMETHING WENT WRONG"
            print "The size is too big. Angle is probably big too."
            #print "Exiting to stop out of control robot"
            robot.stop()
            if(camera_is_reading):
                camera_is_reading = False
                camera.release()
            #show_picture() #!! DEBUG ONLY
            #exit()
        # Do the appropriate action based on what phase we are in
        if phase == 'seek':
            if size == 0:
                print('Did not Detect Ball. Spinning' )
                print "seek_direction = ", seek_direction
                log += 'Did not find ball, spinning.\n'
                robot.spin(50*POWER, seek_direction)
                #robot.spinfortime(2.5,50,seek_direction)
                #time.sleep(0.25)
            else:
                phase = 'turn'
                robot.stop()
                #camera.release()
                #del camera
                #time.sleep(3)
                log += 'Found Object, Centering.\n'
                print('Found Object, Moving to Turn Phase')
        if phase == 'turn':
            if size == 0:
                ball_was_found = False
                phase = 'seek'
                s = 'Ball Lost, Seeking'
                log += s
                print(s)
                continue
            ball_was_found = True
            if abs(angle) < CLAW_ANGLE:
                # phase = 'move'
                robot.arm_highest()
                s = 'Angle Good. Going to Move Phase'
                print (s)
                log += s
                if center_of_mass[0] > CLAW_DISTANCE:
                    # Close enough: grab, spin away, release
                    robot.pickup()
                    robot.spinfortime(.6,50*POWER,True)
                    #robot.timed_forward(.3, 45)
                    robot.release()
                    robot.arm_highest()
                    #Done:
                    cv2.destroyAllWindows()
                    print "\n\nJob Complete. Sleeping for 10 seconds. Press Ctrl+Z to quit.\n"
                    time.sleep(10)
                    # exit()
                else:
                    robot.timed_forward(time_percent*0.10,power_percent*100*POWER)
            else:
                turn_left = angle<0
                turn_time = np.float32(estimate_turn_time(angle))
                print('Turning: ')
                print(' Turn Angle:' + str(angle))
                print(' Turn Time: ' + str(turn_time))
                robot.spinfortime(turn_time,100*power_percent*POWER,turn_left)
        if LOG:
            # Save an annotated snapshot plus the per-iteration text log
            log += 'End Phase: ' + str(phase)
            #if counter % 2 != 0:
            #    continue
            picnum += 1
            zerostring = '000'
            if picnum > 9: zerostring = '00'
            if picnum > 99: zerostring = '0'
            if picnum > 999: zerostring = ''
            filename = 'Instance_' + str(instance) + '_Pic_' + zerostring + str(picnum) + '.png'
            fullname = os.path.join(DIRECTORY, filename)
            fig = plt.figure(1)
            frame = plt.subplot2grid((1,2),(0,0))
            frame.imshow(orig)
            plt.text(center_of_mass[1], center_of_mass[0], '+', fontsize=60, color='green',verticalalignment='center', horizontalalignment='center')
            frame = plt.subplot2grid((1,2),(0,1))
            frame.text(0.1,0.9,log, transform=frame.transAxes, va='top', ha='left', fontsize=8)
            frame.axes.get_xaxis().set_visible(False)
            frame.axes.get_yaxis().set_visible(False)
            frame.patch.set_visible(False)
            frame.axis('off')
            print 'Saving Log...'
            plt.savefig(fullname)
            print('Log Save Time: ' + str(time.time()-t0) + ' seconds.')
            # End LOG
    # end While
    robot.stop_all()
    cv2.destroyAllWindows()
    return None
def console_thread(robot, left_motor, right_motor, imu_device, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar, speed_pid_controller):
    """Interactive debug console loop.

    Reads commands from stdin and dispatches them to the robot's devices:
      - single-char commands: select PID ('1'), print device info ('m', 'v',
        'c', 'u', 'r'), leave console ('q'), hard exit ('x'), help ('h');
      - two-char commands: single movement steps ('sf', 'sb', 'sl', 'sr');
      - 'X=value': set log level, PID gains, speed, angle, strength, line
        threshold;
      - 'XY=value': set front/back pan/tilt positions.

    Blocks until 'q' (return to operational mode) or 'x' (process exit).
    """
    print_help()
    is_console_on = True
    selected_pid = 1
    while is_console_on:
        print('> ', end = '')
        user_input = input()
        if len(user_input) == 1:
            command = user_input[0]
            if command == '1':
                print('')
                print('Speed PID selected')
                print('')
                selected_pid = 1
            elif command == 'm':
                print('')
                left_motor.print_info ('LEFT DC MOTOR')
                right_motor.print_info('RIGHT DC MOTOR')
                print('')
            elif command == 'v':
                print('')
                front_pan_tilt.print_info('FRONT')
                print('')
                back_pan_tilt.print_info ('BACK' )
                print('')
            elif command == 'c':
                print('')
                print('SPEED PID:')
                speed_pid_controller.print_info()
                print('')
            elif command == 'u':
                print('')
                imu_device.print_info()
                print('')
            elif command == 'r':
                print('')
                robot.print_info()
                print('')
                front_lidar.print_info('FRONT LIDAR')
                back_lidar.print_info ('BACK LIDAR')
                print('')
                print('Line following threshold: {}'.format(control.line_threshold))
                print('')
            elif command == 'q':
                print('')
                print('***** GOING TO OPERATIONAL MODE *****')
                is_console_on = False
            elif command == 'x':
                print('***** EXITING GRACEFULLY *****')
                robot.stop()
                front_pan_tilt.stop()
                back_pan_tilt.stop ()
                if status.is_rpi_gpio_used:
                    RPi.GPIO.cleanup()
                # Hard exit: skip atexit handlers; this is the debug console's
                # deliberate "kill everything now" path.
                os._exit(0)
            elif command == 'h':
                print_help()
        elif len(user_input) == 2:
            command = user_input[0:2]
            if command == 'sf':
                robot.forward_step()
            elif command == 'sb':
                robot.backward_step()
            elif command == 'sl':
                robot.left_step()
            elif command == 'sr':
                robot.right_step()
        elif len(user_input) > 2 and user_input[1] == '=':
            command = user_input[0]
            # FIX: a non-numeric value used to raise ValueError and kill the
            # console thread; reject it and keep the console alive instead.
            try:
                value = float(user_input[2:])
            except ValueError:
                print('Invalid numeric value: {}'.format(user_input[2:]))
                continue
            if command == 'l':
                if value == 0:
                    log_set_level(NO_LOG )
                elif value == 1:
                    log_set_level(ERROR )
                elif value == 2:
                    log_set_level(WARNING)
                elif value == 3:
                    log_set_level(INFO )
                elif value == 4:
                    log_set_level(DEBUG )
                else:
                    print('Invalid log level')
            elif command == 'p':
                if selected_pid == 1:
                    speed_pid_controller.set_kp(value)
            elif command == 'i':
                if selected_pid == 1:
                    speed_pid_controller.set_ki(value)
            elif command == 'd':
                if selected_pid == 1:
                    speed_pid_controller.set_kd(value)
            elif command == 'w':
                if selected_pid == 1:
                    speed_pid_controller.set_anti_wind_up(value)
            elif command == 's':
                # Signed speed: positive drives forward, negative backward.
                if value > 0:
                    robot.forward(value)
                elif value < 0:
                    robot.backward(-value)
                else:
                    robot.stop()
            elif command == 'a':
                if value < 0:
                    robot.right_with_angle(-value, True)
                else:
                    robot.left_with_angle(value, True)
            elif command == 'g':
                if value < 0:
                    robot.right_with_strength(-value)
                else:
                    robot.left_with_strength(value)
            elif command == 't':
                control.line_threshold = value
        elif len(user_input) > 2 and user_input[2] == '=':
            command = user_input[0:2]
            # FIX: same robustness guard for the two-char 'XY=value' commands.
            try:
                value = float(user_input[3:])
            except ValueError:
                print('Invalid numeric value: {}'.format(user_input[3:]))
                continue
            if command == 'fp':
                front_pan_tilt.set_pan(value)
            elif command == 'ft':
                front_pan_tilt.set_tilt(value)
            elif command == 'bp':
                back_pan_tilt.set_pan(value)
            elif command == 'bt':
                back_pan_tilt.set_tilt(value)
def run_obstacles_avoidance(robot, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar):
    """Drive toward the most open direction, re-scanning whenever an obstacle
    gets too close.

    Runs until control.main_mode leaves MODE.AVOID_OBSTACLES or the mode is
    aborted, then stops the robot and resets both pan-tilt units.
    NOTE(review): distances/angles come from find_farthest_obstacle and the
    lidars; units (presumably cm/degrees) are not visible here — confirm.
    """
    log(INFO, 'Starting obstacles avoidance mode')
    front_pan_tilt.reset()
    back_pan_tilt.reset ()
    # ####################################### #
    # Find direction of the farthest obstacle #
    # ####################################### #
    log(INFO, 'Looking for the best direction to start')
    farthest_obstacle_distance, farthest_obstacle_angle = find_farthest_obstacle(PAN_ANGLE_MIN, PAN_ANGLE_MAX, OBSTACLE_FAST_SCANNING_STEP, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar)
    # ############################# #
    # Aim at this farthest obstacle #
    # ############################# #
    # Negative angles are to the right, positive to the left.
    if farthest_obstacle_angle < 0:
        robot.right_with_angle(-farthest_obstacle_angle, True)
    else:
        robot.left_with_angle(farthest_obstacle_angle, True)
    # ############################## #
    # Now start avoiding obstacles   #
    # ############################## #
    search_direction = False
    while (control.main_mode == MODE.AVOID_OBSTACLES) and (control.is_mode_aborted == False):
        if search_direction == True:
            # We just detected an obstacle ahead: stop, nudge forward one
            # step, and rescan for the most open direction.
            robot.stop()
            robot.forward_step()
            # ####################################### #
            # Find direction of the farthest obstacle #
            # ####################################### #
            log(INFO, 'Looking for the best direction to continue')
            back_pan_tilt.reset()
            # First scan with the front lidar only...
            farthest_obstacle_distance, farthest_obstacle_angle = find_farthest_obstacle(PAN_ANGLE_MIN, PAN_ANGLE_MAX, OBSTACLE_FAST_SCANNING_STEP, front_pan_tilt, front_lidar, None, None)
            if farthest_obstacle_distance < AVOID_OBSTACLE_STOP_DISTANCE:
                # ...and fall back to the back lidar if everything ahead
                # is too close.
                front_pan_tilt.reset()
                farthest_obstacle_distance, farthest_obstacle_angle = find_farthest_obstacle(PAN_ANGLE_MIN, PAN_ANGLE_MAX, OBSTACLE_FAST_SCANNING_STEP, None, None, back_pan_tilt, back_lidar)
            # ############################# #
            # Aim at this farthest obstacle #
            # ############################# #
            if farthest_obstacle_angle < 0:
                robot.right_with_angle(-farthest_obstacle_angle, True)
            else:
                robot.left_with_angle(farthest_obstacle_angle, True)
            log(INFO, 'Aiming at the farthest obstacle')
            search_direction = False
        else:
            # ############################# #
            # Get distance of next obstacle #
            # ############################# #
            next_obstacle_distance = front_lidar.get_distance()
            # ################################ #
            # Now move forward that direction  #
            # ################################ #
            if next_obstacle_distance < AVOID_OBSTACLE_STOP_DISTANCE:
                log(INFO, 'Obstacle ahead: changing direction')
                search_direction = True
            else:
                log(DEBUG, 'Path is clear: moving on...')
                robot.forward(TARGET_SPEED_STEP / 2)
    # Mode left or aborted: halt and restore pan-tilt units.
    robot.stop()
    front_pan_tilt.reset()
    back_pan_tilt.reset ()
def run_along_obstacle(robot, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar):
    """Follow alongside the closest obstacle (wall following).

    Finds the closest obstacle, turns parallel to it, then drives forward
    while steering to keep the side (back) lidar reading near the initial
    reference distance. Turns away when an obstacle appears ahead, and turns
    toward the obstacle side when the wall is lost. Runs until
    control.main_mode leaves MODE.ALONG_OBSTACLE or the mode is aborted,
    then stops the robot and resets both pan-tilt units.
    """
    log(INFO, 'Starting along obstacle mode')
    front_pan_tilt.reset()
    back_pan_tilt.reset ()
    # ###################################### #
    # Find direction of the closest obstacle #
    # ###################################### #
    log(INFO, 'Looking for the best direction to start')
    closest_obstacle_distance, closest_obstacle_angle = find_closest_obstacle(PAN_ANGLE_MIN, PAN_ANGLE_MAX, OBSTACLE_FAST_SCANNING_STEP, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar)
    # ##################################### #
    # Get parallel to this closest obstacle #
    # ##################################### #
    # Negative angles mean the obstacle is on the right; turn so the robot
    # ends up at 90 degrees from it (i.e. parallel to the wall).
    if closest_obstacle_angle < 0:
        log(INFO, 'Closest obstacle is on the right')
        is_obstacle_on_the_right = True
        if -90 < closest_obstacle_angle < 0:
            robot.left_with_angle(90 + closest_obstacle_angle, True)
        else:
            robot.right_with_angle(-closest_obstacle_angle - 90, True)
    else:
        log(INFO, 'Closest obstacle is on the left')
        is_obstacle_on_the_right = False
        if 0 < closest_obstacle_angle < 90:
            robot.right_with_angle(90 - closest_obstacle_angle, True)
        else:
            robot.left_with_angle(closest_obstacle_angle - 90, True)
    log(INFO, 'Now parallel to the closest obstacle')
    # ###################### #
    # Now move along obstacle #
    # ###################### #
    # Point the back lidar sideways, toward the wall, and take the initial
    # wall distance as the reference to hold while moving.
    if is_obstacle_on_the_right == True:
        back_pan_tilt.pan_go_left()
    else:
        back_pan_tilt.pan_go_right()
    reference_distance = back_lidar.get_distance()
    log(INFO, 'Reference distance: {:06.3f}'.format(reference_distance))
    while (control.main_mode == MODE.ALONG_OBSTACLE) and (control.is_mode_aborted == False):
        robot.forward(TARGET_SPEED_STEP)
        front_distance = front_lidar.get_distance()
        side_distance  = back_lidar.get_distance()
        delta_distance = side_distance - reference_distance
        if front_distance < AVOID_OBSTACLE_STOP_DISTANCE:
            log(INFO, 'Obstacle ahead: changing direction')
            robot.stop()
            if is_obstacle_on_the_right == True:
                robot.left_with_angle(45, True)
            else:
                robot.right_with_angle(45, True)
            log(INFO, 'Ready to restart along obstacle')
        elif side_distance > 2 * reference_distance:
            log(INFO, 'Lost obstacle: changing direction')
            robot.stop()
            if is_obstacle_on_the_right == True:
                robot.right_with_angle(45, True)
            else:
                robot.left_with_angle(45, True)
            robot.forward(TARGET_SPEED_STEP)
            # Wait a bit for a move to be done
            time.sleep(1)
            log(INFO, 'Ready to restart along obstacle')
        elif -ALONG_OBSTACLE_PRECISION < delta_distance < ALONG_OBSTACLE_PRECISION:
            log(DEBUG, 'Obstacle is at the expected distance: moving on...')
            robot.stop_turn()
        elif delta_distance > 0:
            log(DEBUG, 'Going away from obstacle: correcting...')
            if is_obstacle_on_the_right == True:
                robot.right_with_strength(LEFT_RIGHT_TURN_STRENGTH)
            else:
                robot.left_with_strength(LEFT_RIGHT_TURN_STRENGTH)
        else:
            # FIX: this branch handles delta_distance < 0 (closer than the
            # reference); the original logged the same "Going away" message
            # as the previous branch, which was misleading in the logs.
            log(DEBUG, 'Getting too close to obstacle: correcting...')
            if is_obstacle_on_the_right == True:
                robot.left_with_strength(LEFT_RIGHT_TURN_STRENGTH)
            else:
                robot.right_with_strength(LEFT_RIGHT_TURN_STRENGTH)
    # Mode left or aborted: halt and restore pan-tilt units.
    robot.stop()
    front_pan_tilt.reset()
    back_pan_tilt.reset ()
def stop():
    """Halt the robot and reply with an empty JSON object body."""
    robot.stop()
    empty_json_body = '{}'
    return empty_json_body
import robot  # adds custom functions you can use to drive the robot

# NOTE(review): `forward` is a bare name that is not defined in this script;
# it would raise NameError unless the robot module (or an earlier fragment)
# injects it — confirm whether robot.move("forward") was intended.
robot.move(forward)  # move forward
robot.delay(2)       # do previous command for 2 seconds
robot.stop()         # stop
# FIX: `print "test"` is Python-2-only syntax; the rest of this file uses the
# Python-3 print() function, so use the form valid in both.
print("test")
# NOTE(review): these first four statements read current_x/current_y before
# they are assigned below, and mirror exactly what update_motor_powers()
# (called in the event loop) must do — they look like the body of a
# `def update_motor_powers():` whose header was lost; confirm against the
# original file before running this as-is.
# Mix joystick Y (throttle) and X (steering) into per-side motor powers,
# scaled from the evdev axis range (-32768..32767) to percent and clamped
# by limit_value.
power_l = limit_value((current_y / 32768 * 100) + (current_x / 32768 * 100))
power_r = limit_value((current_y / 32768 * 100) - (current_x / 32768 * 100))
robot.left (power_l)
robot.right(power_r)

# Pick the first evdev input device as the controller.
devices = [InputDevice(device) for device in list_devices()]
keyboard = devices[0]
print(keyboard)
# Map absolute-axis event codes to their handler callbacks.
keypress_actions = {
    ecodes.ABS_X: store_x,
    ecodes.ABS_Y: store_y
}
current_x = 0
current_y = 0
power_l = 0
power_r = 0
try:
    #with open("values.log", "w") as log:
    # Blocking event loop: update the stored axis value, then recompute
    # and apply motor powers on every axis event.
    for event in keyboard.read_loop():
        if event.code in keypress_actions:
            #print(categorized, categorized.event.code, categorized.event.sec, categorized.event.timestamp, categorized.event.type, categorized.event.usec, categorized.event.value, file=log)
            keypress_actions[event.code](event.value)
            update_motor_powers()
            #print("x:", current_x, "y:", current_y, "power_l:", power_l, "power_r:", power_r, file=log)
except KeyboardInterrupt:
    # Ctrl+C: make sure the motors are not left running.
    robot.stop()
def stop():
    """Halt the robot's motors and show the brake pattern on the car LEDs."""
    # FIX: removed `global currentCommand` — the name was neither read nor
    # assigned in this function, so the declaration was a no-op.
    robot.stop()
    carLeds.execute("brake")
import robot
import time

# Rebind the module name to a Robot instance (NOTE: shadows the module).
robot = robot.Robot()

# CSV-style header for the magnetometer log.
print("mag_x, mag_y, mag_z")
prevSample = robot.getMag()
robot.startRight()
# Spin right for 30 seconds, printing each *new* magnetometer reading.
startTime = time.perf_counter()
while time.perf_counter() - startTime < 30:
    curSample = robot.getMag()
    mag_x, mag_y, mag_z = curSample
    sample_changed = ((prevSample[0] != mag_x)
                      or (prevSample[1] != mag_y)
                      or (prevSample[2] != mag_z))
    if sample_changed:
        print("%f, %f, %f" % (mag_x, mag_y, mag_z))
        prevSample = curSample
robot.stop()
def run_corridor_following(robot, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar):
    """Drive along the middle of a corridor.

    Aims at the closest wall, steps forward/backward until front and back
    lidar readings agree (centered), turns 90 degrees to face down the
    corridor, then drives while steering to keep the left/right wall
    distances balanced. A u-turn request is honored only while centered.
    Runs until control.main_mode leaves MODE.FOLLOW_CORRIDOR or the mode is
    aborted, then stops the robot and resets both pan-tilt units.
    """
    log(INFO, 'Starting corridor following mode')
    front_pan_tilt.reset()
    back_pan_tilt.reset ()
    # ###################################### #
    # Find direction of the closest obstacle #
    # ###################################### #
    log(INFO, 'Looking for the best direction to start')
    closest_obstacle_distance, closest_obstacle_angle = find_closest_obstacle(PAN_ANGLE_MIN, PAN_ANGLE_MAX, OBSTACLE_FAST_SCANNING_STEP, front_pan_tilt, front_lidar, back_pan_tilt, back_lidar)
    # ############################ #
    # Aim at this closest obstacle #
    # ############################ #
    # Negative angles are to the right, positive to the left.
    if closest_obstacle_angle < 0:
        robot.right_with_angle(-closest_obstacle_angle, True)
    else:
        robot.left_with_angle(closest_obstacle_angle, True)
    log(INFO, 'Now aiming at the closest obstacle')
    # ############################ #
    # Get centered in the corridor #
    # ############################ #
    # Facing one wall: step until front and back lidar distances match
    # within CORRIDOR_FOLLOWING_PRECISION.
    # NOTE(review): this loop has no timeout/abort check — it can spin
    # forever if the robot never reaches the precision window; confirm.
    is_centered = False
    while is_centered == False:
        front_wall_distance = front_lidar.get_distance()
        back_wall_distance = back_lidar.get_distance ()
        delta_distance = front_wall_distance - back_wall_distance
        if -CORRIDOR_FOLLOWING_PRECISION < delta_distance < CORRIDOR_FOLLOWING_PRECISION:
            is_centered = True
        elif delta_distance > 0:
            robot.forward_step ()
        else:
            robot.backward_step()
    log(INFO, 'Now centered in corridor')
    # ############################## #
    # Get into the forward direction #
    # ############################## #
    robot.right_with_angle(90, True)
    log(INFO, 'Ready to move in corridor')
    # ######################################## #
    # Now move forward in corridor and         #
    # keep the best possible centered position #
    # ######################################## #
    # Both lidars look sideways: front lidar reads the left wall, back
    # lidar the right wall.
    front_pan_tilt.pan_go_left()
    back_pan_tilt.pan_go_left ()
    while (control.main_mode == MODE.FOLLOW_CORRIDOR) and (control.is_mode_aborted == False):
        robot.forward(TARGET_SPEED_STEP)
        left_wall_distance = front_lidar.get_distance()
        right_wall_distance = back_lidar.get_distance ()
        delta_distance = left_wall_distance - right_wall_distance
        if -CORRIDOR_FOLLOWING_PRECISION < delta_distance < CORRIDOR_FOLLOWING_PRECISION:
            log(DEBUG, 'Correctly centered in corridor: moving on...')
            robot.stop_turn()
            # Allow u-turn only when robot is centered
            if control.is_u_turn_requested == True:
                log(DEBUG, 'Performing u-turn as per requested')
                robot.stop()
                robot.right_with_angle(180, True)
                control.is_u_turn_requested = False
        elif delta_distance > 0:
            log(DEBUG, 'Going away from center: correcting to the left...')
            robot.left_with_strength(LEFT_RIGHT_TURN_STRENGTH / 2)
        else:
            log(DEBUG, 'Going away from center: correcting to the right...')
            robot.right_with_strength(LEFT_RIGHT_TURN_STRENGTH / 2)
    # Mode left or aborted: halt and restore pan-tilt units.
    robot.stop()
    front_pan_tilt.reset()
    back_pan_tilt.reset ()