def main():
    """Run the live perception-tuning GUI.

    Opens an OpenCV window with two trackbars that control the block and
    starfish detection thresholds, and continuously shows the labelled
    camera frame.  Updates the module-level ``block_t`` / ``starfish_t``
    globals from the trackbar positions on every iteration.

    Keys (with the CV window in focus):
        q — quit
        p — pause until any key is pressed
    """
    global block_t, starfish_t
    cv2.namedWindow('image')
    perception_obj = Perception()
    # Make trackbars, with default values above
    cv2.createTrackbar('Block Threshold', 'image', block_t, 255, nothing)
    cv2.createTrackbar('Starfish Threshold', 'image', starfish_t, 255, nothing)
    print(
        "Showing video. With the CV window in focus, press q to exit, p to pause."
    )
    while True:
        # get current positions of trackbars and push them into the
        # perception object so detection uses the live threshold values
        block_t = cv2.getTrackbarPos('Block Threshold', 'image')
        starfish_t = cv2.getTrackbarPos('Starfish Threshold', 'image')
        perception_obj.block_t = block_t
        perception_obj.starfish_t = starfish_t
        scene = perception_obj.get_all_targets()
        img = perception_obj.frame
        img = label_scene(img, scene)
        if img is not None:
            cv2.imshow('image', img)
        # Delay for 66 ms (~15 fps).  Mask to the low byte: waitKey may
        # return the code with modifier/high bits set on some platforms
        # (and -1 on timeout), so a raw comparison with ord() can fail.
        key = cv2.waitKey(66) & 0xFF
        if key == ord('q'):  # Press q to exit
            break
        elif key == ord('p'):  # p to pause
            cv2.waitKey(-1)  # wait until any key is pressed
    cv2.destroyAllWindows()
from ArmIK.Transform import convertCoordinate
import db_txt as db

if __name__ == '__main__':
    # Pick-and-place driver loop: runs perception on each frame, then polls
    # the grasp database for a target to pick up.
    # NOTE(review): this block appears truncated here — world_x/world_y are
    # computed but never used in the visible code; the arm-motion code that
    # consumes them presumably follows. Confirm against the full file.
    count = 0  # frame/iteration counter
    perception_obj = Perception()
    paw_obj = Paw()
    final_pos = (-15 + 0.5, 12 - 0.5, 1.5)  # x block home
    # presumably (width, height) of the camera frame in pixels — verify
    # against the Perception capture settings
    img_size = (640, 480)
    while True:
        # perception code
        count = count + 1
        #perception_obj.get_frame()
        scene = perception_obj.get_all_targets()
        img = perception_obj.frame
        img = label_scene(img, scene)
        if img is not None:
            print(scene)
            show_image(img)
        # arm code
        graspInfo = db.getGraspDB()
        if 'x' in graspInfo:  # if grasp info updated
            # pick up object from set position
            # assumes graspInfo carries pixel coordinates plus a grasp
            # angle under keys 'x', 'y', 'angle' — TODO confirm schema
            x_pos = graspInfo['x']
            y_pos = graspInfo['y']
            a_pos = graspInfo['angle']
            world_x, world_y = convertCoordinate(
                x_pos, y_pos, img_size)  # Convert to real world coordinates