class SkeletonTrackerNode:

    def __init__(self):
        rospy.init_node('skeleton_node', anonymous=True)
        # self.recog = PyPoseRecognizer(22, WIDTH, HEIGHT, self.forest_file, USE_CPU, 320)
        self.grabber = PyOpenNIHandGrabber()

    def run(self):
        # Alternate between searching for the hand and tracking its skeleton
        while not rospy.is_shutdown():
            print("Wave the hand in front of the sensor\n")
            self.found_mask = False
            while not rospy.is_shutdown() and not self.find_hand():
                pass
            self.found_mask = True
            while not rospy.is_shutdown() and self.track_skeleton():
                pass

    def find_hand(self):
        self.rgb, self.depth = self.grabber.grabFrames()
        # pos = self.grabber.getHand3DPos()
        self.show_image()
        # if len(pos) > 2:
        #     if pos[0] or pos[1] or pos[2]:
        #         return True
        return False

    def track_skeleton(self):
        self.rgb, self.depth = self.grabber.grabFrames()
        pos = self.grabber.getHand3DPos()
        if not pos[0] and not pos[1] and not pos[2]:
            print("Hand position lost...")
            return False
        # Segment the hand from the depth map and estimate the joint positions
        self.mask = self.grabber.segment(self.depth, pos, RADIUS)
        joints = self.recog.getJoints(self.depth, self.mask)
        self.pub_skeleton(joints)
        self.show_image()
        return True

    def show_image(self):
        to_show = cvtColor(self.rgb, COLOR_RGB2BGR)
        # Optional overlay of the segmented hand mask on the RGB image
        # if self.found_mask:
        #     blank_image = np.zeros(to_show.shape, np.uint8)
        #     blank_image[:, :] = (0, 0, 255)
        #     rows, cols = self.mask.shape
        #     M = np.float32([[1, 0, -20], [0, 1, 0]])
        #     self.mask = warpAffine(self.mask, M, (cols, rows))
        #     blank_image = bitwise_and(blank_image, blank_image, mask=self.mask)
        #     to_show = addWeighted(to_show, 1, blank_image, 0.4, 0)
        imshow("Image", to_show)
        waitKey(30)

    def pub_skeleton(self, joints):
        msg = hand_skeleton()
        for j in joints:
            msg.joints.append(Point(j[0], j[1], j[2]))
        self.skeleton_pub.publish(msg)
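For completeness, a conventional ROS entry point for this node might look like the sketch below. It is not part of the original listing; it simply assumes the class above is defined in the same script, instantiates it, and runs it until shutdown.

if __name__ == '__main__':
    try:
        node = SkeletonTrackerNode()
        node.run()
    except rospy.ROSInterruptException:
        # Raised by rospy when the node is shut down (e.g. Ctrl-C)
        pass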
def __init__(self):
    rospy.init_node('skeleton_node', anonymous=True)
    self.forest_file = rospy.get_param('~forest', '../xml/forest-1layer.xml')
    self.skeleton_topic = rospy.get_param('skeleton_topic', '/skeleton')
    self.skeleton_pub = rospy.Publisher(self.skeleton_topic, hand_skeleton, queue_size=10)
    self.recog = PyPoseRecognizer(WIDTH, HEIGHT, self.forest_file, USE_CPU, 320)
    self.grabber = PyOpenNIHandGrabber()
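Once the publisher above is in place, a downstream node can consume the skeleton messages. The following is a minimal sketch of such a consumer, assuming hand_skeleton exposes a joints list of geometry_msgs/Point (as implied by pub_skeleton) and that the message is imported from a package named hand_tracking_msgs, which is a placeholder, not the actual package name.

import rospy
from hand_tracking_msgs.msg import hand_skeleton  # placeholder package name

def on_skeleton(msg):
    # Log the position of the first joint as a quick sanity check
    if msg.joints:
        j = msg.joints[0]
        rospy.loginfo("first joint: (%.3f, %.3f, %.3f)", j.x, j.y, j.z)

if __name__ == '__main__':
    rospy.init_node('skeleton_listener')
    rospy.Subscriber('/skeleton', hand_skeleton, on_skeleton)
    rospy.spin()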
if len(sys.argv) != 3:
    print("Usage: > python script_name forest1 forest2")
else:
    # client = ClientSocket(IP, PORT, 'P' * 16)

    # CV2 windows
    namedWindow("rgb")
    namedWindow("masc")
    namedWindow("segno")

    # OpenNI tracker and cipher initialization
    recog = PyPoseRecognizer(22, WIDTH, HEIGHT, sys.argv[1], USE_CPU, 320)
    grabber = PyOpenNIHandGrabber()
    # clf = joblib.load(sys.argv[2])
    # crypt = AES.new(PASSCODE)

    # OpenNI: wave to start tracking
    for i in range(0, MAX_POSES):
    # for i in range(0, 10):
        print("Wave the hand in front of the sensor\n")
        while True:
            rgb, depth = grabber.grabFrames()
            pos = grabber.getHand3DPos()
            if len(pos) > 2:
                if pos[0] or pos[1] or pos[2]:
                    break
if __name__ == "__main__": if len(sys.argv) != 3: print("Usage: > python script_name forest1 forest2") else: client = ClientSocket(IP, PORT, 'P' * 16) #CV2 Windows namedWindow("rgb") namedWindow("masc") namedWindow("segno") #OpenNi tracker and Cypher Initialization recog = PyPoseRecognizer(WIDTH, HEIGHT, sys.argv[1], USE_CPU, 320) grabber = PyOpenNIHandGrabber() lista = create_list() clf = joblib.load(sys.argv[2]) crypt = AES.new(PASSCODE) #OpenNi wave to start tracking for i in range(0, MAX_POSES): #for i in range(0,10): print("Wave the hand in front of the sensor \n") while True: rgb, depth = grabber.grabFrames() pos = grabber.getHand3DPos() if len(pos) > 2: if pos[0] or pos[1] or pos[2]: break