def main():
    # Subscribe to the left hand camera and republish each frame to Baxter's head display
    sihObj = SubImageHelp("/cameras/left_hand_camera/image")
    pihObj = PubImageHelp("/robot/xdisplay")
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        image = sihObj.getCVImage()
        print image
        try:
            pihObj.publishROSImage(image)
            rospy.sleep(1)
            print "OK"
        except:
            print "Error publishing image"
class moveGetCamArmImage:
    def __init__(self, limb = 'right', file_name = "BaxterCamArmJointPosFile.dat"):
        # Load Baxter Cam Arm Joint Positions
        posFile = open(file_name, "r")
        self._camArmJoints = cPickle.load(posFile)
        posFile.close()
        topic_name = "/cameras/" + limb + "_hand_camera/image"
        self._sihObj = SubImageHelp(topic_name)
        self._limb = limb
        self._cam_arm = baxter_interface.limb.Limb(self._limb)

    def moveCamArm(self):
        # Move Baxter Arm to Joint Position
        print "Moving Camera Arm..."
        self._cam_arm.set_joint_position_speed(0.5)  # Joint movement speed
        self._cam_arm.move_to_joint_positions(self._camArmJoints, 5.0)
        print "Camera Arm Moved to Position"

    def getCamArmImage(self):
        # Get image frame from baxter hand camera
        print
        print "Waiting for image..."
        error_flag = True
        rate = rospy.Rate(1)
        rate.sleep()
        while error_flag == True:
            try:
                img = self._sihObj.getCVImage()
                error_flag = False
                print "BaxterImageObtained"
                rate.sleep()
            except:
                print
                print "Error in Obtaining Image"
        return img
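# --- Usage sketch (not part of the original scripts) ---
# A minimal, hedged example of driving moveGetCamArmImage from a ROS node.
# The node name and the flow below are assumptions for illustration; it only
# relies on the class API defined above.
import rospy

if __name__ == '__main__':
    rospy.init_node('MoveGetCamArmImageTest')     # node name is arbitrary
    camHelper = moveGetCamArmImage(limb='right')  # loads BaxterCamArmJointPosFile.dat
    camHelper.moveCamArm()                        # drive the arm to the stored pose
    frame = camHelper.getCamArmImage()            # blocks until a camera frame arrives
    print "Image shape:", frame.shape             # OpenCV image is a numpy array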
def main():
    # Start ROS Node
    rospy.init_node('GatherANNData')

    # Load Baxter Cam Arm Joint Positions
    posFile = open("BaxterCamArmJointPosFile.dat", "r")
    joints = cPickle.load(posFile)
    posFile.close()

    # Move Baxter Arm to Joint Position
    print "Moving Camera Arm..."
    cam_arm = baxter_interface.limb.Limb("right")
    cam_arm.set_joint_position_speed(0.5)  # Joint movement speed
    cam_arm.move_to_joint_positions(joints, 5.0)
    print "Camera Arm Moved to Position"

    print "Press C to continue and get image frame"
    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        pass
    time.sleep(1)

    # Get image frame from baxter hand camera
    print
    print "Waiting for image..."
    sihObj = SubImageHelp("/cameras/right_hand_camera/image")
    error_flag = True
    rate = rospy.Rate(1)
    rate.sleep()
    while error_flag == True:
        try:
            img = sihObj.getCVImage()
            error_flag = False
            print "BaxterImageObtained"
            rate.sleep()
        except:
            print
            print "Error in Obtaining Image"
    time.sleep(1)

    # Load Image Clipping Coordinates
    fileObj = open("ImageClipPosFile.dat", "r")
    image_clip = cPickle.load(fileObj)
    fileObj.close()

    # Clip Image
    img = img[:, image_clip[0] : image_clip[2]]
    img = img[image_clip[1] : image_clip[3], :]

    # Load HSV Filter Values
    fileObj = open("HSVValFile.dat", "r")
    hsvcutoffs = cPickle.load(fileObj)
    fileObj.close()

    # Continue to image analysis
    # Create Windows and TrackBars
    cv2.namedWindow('Binary', cv2.WINDOW_NORMAL)
    cv2.namedWindow('Contour', cv2.WINDOW_NORMAL)
    cv2.namedWindow('Trackbars', cv2.WINDOW_NORMAL)
    cv2.createTrackbar('HueMin', 'Trackbars', hsvcutoffs[0], 179, nothing)
    cv2.createTrackbar('HueMax', 'Trackbars', hsvcutoffs[1], 179, nothing)
    cv2.createTrackbar('SatMin', 'Trackbars', hsvcutoffs[2], 255, nothing)
    cv2.createTrackbar('SatMax', 'Trackbars', hsvcutoffs[3], 255, nothing)
    cv2.createTrackbar('ValMin', 'Trackbars', hsvcutoffs[4], 255, nothing)
    cv2.createTrackbar('ValMax', 'Trackbars', hsvcutoffs[5], 255, nothing)

    # Initialize kernel and font
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (5, 5))
    font = cv2.FONT_HERSHEY_SIMPLEX

    # Preprocess with gaussian blur
    blur = cv2.GaussianBlur(img, (5, 5), 0)
    blur_copy = img.copy()

    print
    print "Press C to Start Detection and C again to enter values"
    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        pass
    time.sleep(1)

    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        hmin = cv2.getTrackbarPos('HueMin', 'Trackbars')
        smin = cv2.getTrackbarPos('SatMin', 'Trackbars')
        vmin = cv2.getTrackbarPos('ValMin', 'Trackbars')
        hmax = cv2.getTrackbarPos('HueMax', 'Trackbars')
        smax = cv2.getTrackbarPos('SatMax', 'Trackbars')
        vmax = cv2.getTrackbarPos('ValMax', 'Trackbars')

        hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)
        lower_thresh = numpy.array([hmin, smin, vmin])
        upper_thresh = numpy.array([hmax, smax, vmax])
        mask = cv2.inRange(hsv, lower_thresh, upper_thresh)
        mask = cv2.bitwise_not(mask)
        median = cv2.medianBlur(mask, 5)
        er = cv2.erode(median, kernel, iterations = 0)
        dl = cv2.dilate(er, kernel, iterations = 1)
        er = cv2.erode(dl, kernel, iterations = 1)
        dl = er
        cv2.imshow('Binary', dl)
        dl_copy = dl.copy()

        try:
            # Initialize necessary variables
            tc = 0
            HuMomentsList = numpy.array([[0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0], [0.0]])
            CentroidsList = numpy.array([[0.0], [0.0]])
            delta = 400
            blur_copy = img.copy()
            blur_copy2 = dl_copy.copy()

            # Find contours
            contours, hierarchy = cv2.findContours(dl, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

            # Start loop to find relevant contours
            for i in range(len(contours)):
                if (cv2.arcLength(contours[i], True) > delta) and (hierarchy[0, i, 3] == -1):
                    cv2.drawContours(blur_copy, contours, i, (255, 255, 255), 1)
                    m = cv2.moments(contours[i])
                    cen_x = int(m["m10"] / m["m00"])
                    cen_y = int(m["m01"] / m["m00"])
                    centroid = numpy.array([[cen_x], [cen_y]])
                    CentroidsList = numpy.hstack((CentroidsList, centroid))
                    cv2.putText(blur_copy, str(tc + 1), (cen_x, cen_y), font, 1, (0, 255, 100), 3, 8)
                    HuMoments = cv2.HuMoments(m)
                    I8 = m["nu11"] * ((m["nu30"] + m["nu12"])**2 - (m["nu03"] + m["nu21"])**2) \
                         - (m["nu20"] - m["nu02"]) * (m["nu30"] + m["nu12"]) * (m["nu03"] + m["nu21"])
                    HuMoments[6] = abs(HuMoments[6])           # Make reflection invariant
                    HuMoments = numpy.vstack((HuMoments, I8))  # Modified Hu moments
                    HuMomentsList = numpy.hstack((HuMomentsList, HuMoments))
                    tc += 1

            print "Number of instruments:", tc
            cv2.imshow('Contour', blur_copy)
            cv2.waitKey(1)
        except:
            pass

    cv2.destroyAllWindows()

    CentroidsList = CentroidsList[:, 1:(tc + 1)]
    HuMomentsList = HuMomentsList[:, 1:(tc + 1)]
    HuMomentsList = -numpy.sign(HuMomentsList) * numpy.log10(numpy.abs(HuMomentsList))
    print
    # For 5th HuMoment value of 2nd instrument (Total 8 HuMoment Values for each)
    # HuMomentsList[4,1]
    # For X value of 5th instrument (X:0 Y:1)
    # CentroidsList[0,4]
    print
    print "Centroids:"
    print CentroidsList
    print
    print "HuMoments:"
    print HuMomentsList
    print

    # HSV Cutoffs
    hsvcutoffs = numpy.array([[hmin], [hmax], [smin], [smax], [vmin], [vmax]])
    hsvcutoffs_copy = hsvcutoffs.copy()
    for i in range(tc - 1):
        hsvcutoffs = numpy.hstack((hsvcutoffs, hsvcutoffs_copy.copy()))

    # Display Image
    print "Press Key to Continue"
    cv2.imshow('FinalImage', blur_copy)
    cv2.waitKey(0)

    print "Tool Values:"
    print "Arbitrary: 0 Scalpel: 1 Retractor: 2 Hemostat: 3 Scissors: 4 Hook: 5 Needle: 6"

    # Get the tool values and append to Hu Moments
    tool_values = numpy.array([0.0])
    for i in range(tc):
        value = raw_input("Enter value of tool %s:" % (i + 1))
        value = numpy.array([ float(value[(len(value) - 1)]) ])
        tool_values = numpy.hstack((tool_values, value))
    tool_values = tool_values[1 : (tc + 1)]
    HuMomentsAndToolValues = numpy.vstack((HuMomentsList, tool_values))
    HuMomentsAndToolValues = numpy.vstack((HuMomentsAndToolValues, hsvcutoffs))

    # Add file to ANNData
    for i in range(tc):
        cc = len(glob(join("ANNData", "*.txt")))
        name_file = "Data" + str(cc + 1) + ".txt"
        full_name = join("ANNData", name_file)
        data = HuMomentsAndToolValues[:, i]
        print "Data added to %s" % full_name
        numpy.savetxt(full_name, data)

    cv2.destroyAllWindows()
    cv2.waitKey(0)
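# --- Reading a sample back (sketch, not part of the original scripts) ---
# Each ANNData/Data*.txt written above holds one column of 15 values, in the
# save order used by the script: 8 log-scaled Hu-moment features (including
# the extra I8 term), 1 tool label, then the 6 HSV cutoffs. The helper name
# and example file below are illustrative assumptions.
import numpy
from os.path import join

def load_ann_sample(path):
    data = numpy.loadtxt(path)     # 15 values, one per line
    features = data[0:8]           # log-scaled Hu moments (incl. I8)
    tool_value = data[8]           # tool class label (0..6)
    hsv_cutoffs = data[9:15]       # [hmin, hmax, smin, smax, vmin, vmax]
    return features, tool_value, hsv_cutoffs

features, label, hsv = load_ann_sample(join("ANNData", "Data1.txt"))
print "Tool label:", label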
def main():
    global img
    global image_corners
    global click_count
    global baxter_endpoints

    rospy.init_node('RetrieveCamToolTableCalib', disable_signals=True)

    # Record Baxter Coordinates
    print
    print "Record arm endpoints using Cuff Button and Big Button when 4 points are done (Left Arm)"
    cam_arm = baxter_interface.limb.Limb("left")
    bbhObj = BaxterButtonHelp("left")
    baxter_endpoints = []
    click_count = 0
    while_flag = True
    while(while_flag):
        cButtonPressed = bbhObj.getButtonStates()['cState']
        bButtonPressed = bbhObj.getButtonStates()['bState']
        if(cButtonPressed):
            pose = cam_arm.endpoint_pose()
            xyz = pose["position"]
            baxter_endpoints.append(xyz)
            click_count += 1
            time.sleep(1)
            print "Baxter Arm EndPoint %s added" % (click_count)
        if(bButtonPressed):
            if (click_count == 4):
                while_flag = False
                print "Baxter Endpoints Recorded:"
                print baxter_endpoints
            elif (click_count < 4):
                while_flag = True
                baxter_endpoints = []
                click_count = 0
                print "Less than 4 points recorded. Please re-record all corners"
            else:
                while_flag = True
                baxter_endpoints = []
                click_count = 0
                print "More than 4 points recorded. Please record only the corners"
            time.sleep(1)

    ##############################################################
    print "Storing Retrieval Position"
    # Store retrieval position
    print
    print "Moving baxter cam arm to desired location image frame"
    cam_arm_x = ((baxter_endpoints[0].x + baxter_endpoints[2].x) / 2) + 0      # As camera not at center
    cam_arm_y = ((baxter_endpoints[0].y + baxter_endpoints[2].y) / 2) + 0.003  # As camera is not at center
    cam_arm_z = ((baxter_endpoints[0].z + baxter_endpoints[2].z) / 2) + 0      # Height above table

    # All orientation calculations
    voObj = VectorOrientation()
    voObj.setVectorChange(0.00, 0.00, -1.0)  # X, Y and Z Vector Axis Orientation
    #voObj.setAngleOrientation(-20, 0, 0, 'd')
    voObj.calcVectorOrientation()
    # Set gamma in degrees
    voObj.setGamma(180, 'd')
    # Trick for baxter orientation change for right arm:
    voObj.setAngleOrientation(-voObj._theta, voObj._phi, voObj._gamma, 'r')
    # Calculate quaternion
    quat = voObj.getQuaternionOrientation()

    # Inverse Kinematics
    arm = IKSolveBaxter("left")
    arm.setArmEndPos(cam_arm_x, cam_arm_y, cam_arm_z,
                     quat[0], quat[1], quat[2], quat[3])
    arm.ik_solve()
    ret_joints = arm.getBaxterJointSpace()

    bcajpFile = open("BaxterRetArmJointPosFile.dat", "w")
    cPickle.dump(ret_joints, bcajpFile)
    bcajpFile.close()

    # Move Arm
    control_arm = baxter_interface.limb.Limb("left")
    control_arm.set_joint_position_speed(0.5)  # Joint movement speed
    control_arm.move_to_joint_positions(ret_joints, 5.0)
    time.sleep(1)

    ##############################################################
    # Move baxter arm
    print
    print "Moving baxter cam arm to desired location image frame"
    cam_arm_x = ((baxter_endpoints[0].x + baxter_endpoints[2].x) / 2) + 0.075  # As camera not at center
    cam_arm_y = ((baxter_endpoints[0].y + baxter_endpoints[2].y) / 2) + 0.005  # As camera is not at center
    cam_arm_z = ((baxter_endpoints[0].z + baxter_endpoints[2].z) / 2) + 0.15   # Height above table

    # All orientation calculations
    voObj = VectorOrientation()
    voObj.setVectorChange(0.00, 0.00, -1.0)  # X, Y and Z Vector Axis Orientation
    #voObj.setAngleOrientation(-20, 0, 0, 'd')
    voObj.calcVectorOrientation()
    # Set gamma in degrees
    voObj.setGamma(180, 'd')
    # Trick for baxter orientation change for right arm:
    voObj.setAngleOrientation(-voObj._theta, voObj._phi, voObj._gamma, 'r')
    # Calculate quaternion
    quat = voObj.getQuaternionOrientation()

    # Inverse Kinematics
    arm = IKSolveBaxter("left")
    arm.setArmEndPos(cam_arm_x, cam_arm_y, cam_arm_z,
                     quat[0], quat[1], quat[2], quat[3])
    arm.ik_solve()
    joints = arm.getBaxterJointSpace()

    # Move Arm
    control_arm = baxter_interface.limb.Limb("left")
    control_arm.set_joint_position_speed(0.5)  # Joint movement speed
    control_arm.move_to_joint_positions(joints, 5.0)

    os.system("clear")
    print "Press C to continue and store baxter camera arm position"
    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        pass

    bcajpFile = open("RetrieveBaxterCamArmJointPosFile.dat", "w")
    cPickle.dump(joints, bcajpFile)
    bcajpFile.close()
    print
    print "Baxter Camera Arm Position Stored"

    # Get image frame from baxter hand camera
    print
    print "Waiting for image..."
    sihObj = SubImageHelp("/cameras/left_hand_camera/image")
    error_flag = True
    rate = rospy.Rate(1)
    rate.sleep()
    while error_flag == True:
        try:
            img = sihObj.getCVImage()
            error_flag = False
            print "BaxterImageObtained"
            rate.sleep()
        except:
            print
            print "Error in Obtaining Image"

    # Record all corner points in the image
    print
    print "Record all 4 corner points using mouse double click and then press key to continue"
    img_copy = img.copy()
    while_flag = True
    while(while_flag):
        img = img_copy.copy()
        click_count = 0
        image_corners = []
        cv2.namedWindow('RetrieveImage')
        cv2.imshow('RetrieveImage', img)
        cv2.setMouseCallback('RetrieveImage', handle_mouse)
        cv2.waitKey(0)
        if (click_count == 4):
            while_flag = False
            print "Image Corners Recorded"
            print
            print "The image corners are:"
            print image_corners[0], image_corners[1], image_corners[2], image_corners[3]
        elif (click_count < 4):
            while_flag = True
            print "Less than 4 points recorded. Please re-record all corners"
        else:
            while_flag = True
            print "More than 4 points recorded. Please record only the corners"
        #cv2.destroyAllWindows()
        time.sleep(1)

    X = numpy.matrix([ [image_corners[0][0], image_corners[2][0], image_corners[3][0]],
                       [image_corners[0][1], image_corners[2][1], image_corners[3][1]],
                       [1.0, 1.0, 1.0] ])
    A = numpy.matrix([ [baxter_endpoints[0].x, baxter_endpoints[2].x, baxter_endpoints[3].x],
                       [baxter_endpoints[0].y, baxter_endpoints[2].y, baxter_endpoints[3].y],
                       [baxter_endpoints[0].z, baxter_endpoints[2].z, baxter_endpoints[3].z] ])

    # Add Image Clip positions and Display Clipped Image
    x1 = max( [image_corners[0][0], image_corners[3][0]] )
    y1 = max( [image_corners[0][1], image_corners[1][1]] )
    x2 = min( [image_corners[1][0], image_corners[2][0]] )
    y2 = min( [image_corners[2][1], image_corners[3][1]] )
    image_clip = [x1, y1, x2, y2]
    img = img_copy[:, image_clip[0] : image_clip[2]]
    img = img[image_clip[1] : image_clip[3], :]
    cv2.imwrite('RetrieveImage.png', img)

    ImageClipPosFile = open("RetrieveImageClipPosFile.dat", "w")
    cPickle.dump(image_clip, ImageClipPosFile)
    ImageClipPosFile.close()
    print "Image Clip Positions Added To File"
    time.sleep(1)

    print
    print "Press C to continue and calculate transformation matrix"
    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        pass
    os.system("clear")

    print "Image Corner Pixel Locations:"
    print X
    print
    print "Baxter Endpoint Locations:"
    print A
    print

    XAPosFile = open("RetrieveXAPosFile.dat", "w")
    cPickle.dump(X, XAPosFile)
    cPickle.dump(A, XAPosFile)
    XAPosFile.close()
    time.sleep(1)
    print "X(ImageCorners) and A(BaxterEndpoints) Stored in XAPosFile.dat"
    print

    if(numpy.linalg.det(X) == 0):
        print "Determinant of image corner matrix = 0"
        print "Please retry calibration"
    else:
        T = numpy.matrix( numpy.dot( A, numpy.linalg.inv(X) ) )
        print "Transformation Matrix:"
        print T
        print
        print "Writing to transformation matrix file..."
        tmatFile = open("RetrieveTransformationMatrixFile.dat", "w")
        cPickle.dump(T, tmatFile)
        tmatFile.close()
        print "Transformation matrix added"
        print
        time.sleep(1)

    print "Press C to continue and exit program"
    while(not sf.Keyboard.is_key_pressed(sf.Keyboard.C)):
        pass
    rospy.signal_shutdown("Shutting down ROS Node to calculate transformation")
    print
    print "Exit Program"
    print
    return 0
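# --- Applying the stored calibration (sketch, not part of the original scripts) ---
# Since T = A * inv(X) is fitted on the corner correspondences above, it maps a
# homogeneous pixel coordinate [u, v, 1]^T from the camera image (as clicked,
# before clipping) to a Baxter endpoint [x, y, z]^T. The helper name and the
# example pixel below are illustrative assumptions; the file name follows the script.
import cPickle
import numpy

def pixel_to_baxter(u, v, tmat_file="RetrieveTransformationMatrixFile.dat"):
    f = open(tmat_file, "r")
    T = cPickle.load(f)                      # 3x3 numpy.matrix stored above
    f.close()
    pixel = numpy.matrix([[u], [v], [1.0]])  # homogeneous pixel coordinate
    endpoint = T * pixel                     # 3x1 column: Baxter x, y, z
    return endpoint

print pixel_to_baxter(320, 200)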