Example #1
    def init_camera(self):

        openni2.initialize()
        nite2.initialize()
        self.dev = openni2.Device.open_any()

        dev_name = self.dev.get_device_info().name.decode('UTF-8')
        print("Device Name: {}".format(dev_name))
        self.use_kinect = False
        if dev_name == 'Kinect':
            self.use_kinect = True
            print('using Kinect.')

        try:
            self.user_tracker = nite2.UserTracker(self.dev)
        except utils.NiteError:
            print("Unable to start the NiTE human tracker. Check "
                  "the error messages in the console. Model data "
                  "(s.dat, h.dat...) might be inaccessible.")
            sys.exit(-1)

        (self.img_w, self.img_h
         ) = CAPTURE_SIZE_KINECT if self.use_kinect else CAPTURE_SIZE_OTHERS
        self.win_w = 256
        self.win_h = int(self.img_h * self.win_w / self.img_w)
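All of the examples on this page assume the same kind of module-level setup (the primesense bindings plus two capture-size constants) without showing it. A minimal sketch of that setup with typical values; the exact constants in each source project may differ:

# Assumed module-level setup for the examples on this page (typical values,
# not copied from any one project).
import sys

import cv2
import numpy as np
from primesense import openni2, nite2, utils

# Depth-stream resolutions: Kinect typically delivers 512x424, most other
# OpenNI devices 640x480.
CAPTURE_SIZE_KINECT = (512, 424)
CAPTURE_SIZE_OTHERS = (640, 480)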
Example #2
def capture_skeleton():
    args = parse_arg()
    dev = init_capture_device()

    dev_name = dev.get_device_info().name.decode('UTF-8')
    print("Device Name: {}".format(dev_name))
    use_kinect = False
    if dev_name == 'Kinect':
        use_kinect = True
        print('using Kinect.')

    try:
        user_tracker = nite2.UserTracker(dev)
    except utils.NiteError:
        print("Unable to start the NiTE human tracker. Check "
              "the error messages in the console. Model data "
              "(s.dat, h.dat...) might be inaccessible.")
        sys.exit(-1)

    (img_w, img_h) = CAPTURE_SIZE_KINECT if use_kinect else CAPTURE_SIZE_OTHERS
    win_w = args.window_width
    win_h = int(img_h * win_w / img_w)

    while True:
        ut_frame = user_tracker.read_frame()

        depth_frame = ut_frame.get_depth_frame()
        depth_frame_data = depth_frame.get_buffer_as_uint16()
        img = np.ndarray((depth_frame.height, depth_frame.width), dtype=np.uint16,
                         buffer=depth_frame_data).astype(np.float32)
        if use_kinect:
            img = img[0:img_h, 0:img_w]

        (min_val, max_val, min_loc, max_loc) = cv2.minMaxLoc(img)
        if (min_val < max_val):
            img = (img - min_val) / (max_val - min_val)
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)

        if ut_frame.users:
            for user in ut_frame.users:
                if user.is_new():
                    print("new human id:{} detected.".format(user.id))
                    user_tracker.start_skeleton_tracking(user.id)
                elif (user.state == nite2.UserState.NITE_USER_STATE_VISIBLE and
                      user.skeleton.state == nite2.SkeletonState.NITE_SKELETON_TRACKED):
                    draw_skeleton(img, user_tracker, user, (255, 0, 0))

        cv2.imshow("Depth", cv2.resize(img, (win_w, win_h)))
        
        if cv2.waitKey(1) == 27:  # press Esc to quit
            cv2.destroyAllWindows()
            break

    close_capture_device()
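Example #2 calls helpers such as parse_arg(), init_capture_device(), and close_capture_device() that are not shown. A plausible minimal version of the device helpers, assuming nothing beyond the primesense bindings (the originals may differ); draw_skeleton() is sketched after the next example:

# Hedged sketch of the device helpers used above; not the original code.
from primesense import openni2, nite2


def init_capture_device():
    # Initialize OpenNI2/NiTE2 and open the first device found.
    openni2.initialize()
    nite2.initialize()
    return openni2.Device.open_any()


def close_capture_device():
    # Release the NiTE2 and OpenNI2 runtimes.
    nite2.unload()
    openni2.unload()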
Example #3
    def update(self):

        dev = self.init_capture_device()

        # display camera information
        dev_name = dev.get_device_info().name.decode('UTF-8')
        print("Device Name: {}".format(dev_name))
        use_kinect = False
        if dev_name == 'Kinect':
            use_kinect = True
            print('using Kinect.')

        # start nite2
        try:
            user_tracker = nite2.UserTracker(dev)
        except utils.NiteError:
            print("Unable to start the NiTE human tracker. Check "
                  "the error messages in the console. Model data "
                  "(s.dat, h.dat...) might be inaccessible.")
            sys.exit(-1)

        (self.img_w, self.img_h
         ) = CAPTURE_SIZE_KINECT if use_kinect else CAPTURE_SIZE_OTHERS

        while not self.isDead:
            ut_frame = user_tracker.read_frame()

            depth_frame = ut_frame.get_depth_frame()
            depth_frame_data = depth_frame.get_buffer_as_uint16()
            img = np.ndarray((depth_frame.height, depth_frame.width),
                             dtype=np.uint16,
                             buffer=depth_frame_data).astype(np.float32)
            if use_kinect:
                img = img[0:self.img_h, 0:self.img_w]

            (min_val, max_val, min_loc, max_loc) = cv2.minMaxLoc(img)
            if (min_val < max_val):
                img = (img - min_val) / (max_val - min_val)
            img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)

            if ut_frame.users:
                for user in ut_frame.users:
                    if user.is_new():
                        print("new human id:{} detected.".format(user.id))
                        user_tracker.start_skeleton_tracking(user.id)
                    elif (user.state == nite2.UserState.NITE_USER_STATE_VISIBLE
                          and user.skeleton.state
                          == nite2.SkeletonState.NITE_SKELETON_TRACKED):
                        self.draw_skeleton(img, user_tracker, user,
                                           (255, 0, 0))
            self.frame = img
        self.close_capture_device()  # release the device after the loop exits
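The draw_skeleton() helper used by the last two examples is also omitted. A hedged sketch of one way to implement it, assuming the UserTracker.convert_joint_coordinates_to_depth() call and the joints' positionConfidence field exposed by the primesense NiTE2 bindings, and drawing only a subset of limbs:

# Hedged sketch of draw_skeleton(); not the original implementation.
import cv2
from primesense import nite2

_JOINT_PAIRS = [
    (nite2.JointType.NITE_JOINT_HEAD, nite2.JointType.NITE_JOINT_NECK),
    (nite2.JointType.NITE_JOINT_LEFT_SHOULDER, nite2.JointType.NITE_JOINT_LEFT_ELBOW),
    (nite2.JointType.NITE_JOINT_LEFT_ELBOW, nite2.JointType.NITE_JOINT_LEFT_HAND),
    (nite2.JointType.NITE_JOINT_RIGHT_SHOULDER, nite2.JointType.NITE_JOINT_RIGHT_ELBOW),
    (nite2.JointType.NITE_JOINT_RIGHT_ELBOW, nite2.JointType.NITE_JOINT_RIGHT_HAND),
]


def draw_skeleton(img, user_tracker, user, color=(255, 0, 0)):
    for t1, t2 in _JOINT_PAIRS:
        j1 = user.skeleton.joints[t1]
        j2 = user.skeleton.joints[t2]
        # Skip limbs whose joints NiTE is not confident about.
        if j1.positionConfidence < 0.4 or j2.positionConfidence < 0.4:
            continue
        (x1, y1) = user_tracker.convert_joint_coordinates_to_depth(
            j1.position.x, j1.position.y, j1.position.z)
        (x2, y2) = user_tracker.convert_joint_coordinates_to_depth(
            j2.position.x, j2.position.y, j2.position.z)
        cv2.line(img, (int(x1), int(y1)), (int(x2), int(y2)), color, 2)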
Example #4
    def run(self):
        dev = self.init_capture_device()
        colorStream = dev.create_color_stream()

        dev_name = dev.get_device_info().name.decode('UTF-8')
        print("Device Name: {}".format(dev_name))
        use_kinect = False
        if dev_name == 'Kinect':
            use_kinect = True
            print('using Kinect.')
        try:
            user_tracker = nite2.UserTracker(dev)
        except utils.NiteError:
            print("Unable to start the NiTE human tracker. Check "
                  "the error messages in the console. Model data "
                  "(s.dat, h.dat...) might be inaccessible.")
            sys.exit(-1)

        (
            img_w, img_h
        ) = self.CAPTURE_SIZE_KINECT if use_kinect else self.CAPTURE_SIZE_OTHERS
        win_w = 1024
        win_h = int(img_h * win_w / img_w)

        colorStream.start()

        while self.active:
            colorFrame = colorStream.read_frame()
            colorFrameData = colorFrame.get_buffer_as_triplet()
            cimg = np.ndarray((colorFrame.height, colorFrame.width, 3),
                              dtype=np.uint8,
                              buffer=colorFrameData)

            ut_frame = user_tracker.read_frame()

            depth_frame = ut_frame.get_depth_frame()
            depth_frame_data = depth_frame.get_buffer_as_uint16()

            simg = np.ndarray((depth_frame.height, depth_frame.width),
                              dtype=np.uint16,
                              buffer=depth_frame_data)

            img = simg.astype(np.float32)

            (min_val, max_val, min_loc, max_loc) = cv2.minMaxLoc(img)

            if (min_val < max_val):
                img = (img - min_val) / (max_val - min_val)
            img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)

            if ut_frame.users:
                for user in ut_frame.users:
                    if user.is_new():
                        print("new human id:{} detected.".format(user.id))
                        user_tracker.start_skeleton_tracking(user.id)
                    elif (user.state == nite2.UserState.NITE_USER_STATE_VISIBLE
                          and user.skeleton.state
                          == nite2.SkeletonState.NITE_SKELETON_TRACKED):
                        self.draw_skeleton(img, user_tracker, user)
                        if self.record:
                            skeltonAdded = True
                            self.addSkelton(user_tracker, user)

            if self.record:
                if self.frameCount == 0:
                    self.recordStartTime = datetime.datetime.now().strftime(
                        "%Y-%m-%d-%H-%M-%S-%f")
                    self.dirPath = self.dirPath + self.recordStartTime
                    writer = cv2.VideoWriter(self.dirPath + ".avi",
                                             cv2.VideoWriter_fourcc(*'XVID'),
                                             25, (640, 480), True)
                self.frameCount += 1
                dirPath = self.dirPath + "__" + \
                    str(self.frameCount)+"__" + \
                    datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S-%f")
                np.array(simg).tofile(dirPath + "depth.bin")
                writer.write(cv2.cvtColor(cimg, cv2.COLOR_BGR2RGB))
                cv2.putText(img, 'Recording', self.bottomLeftCornerOfText,
                            self.font, self.fontScale, self.fontColor,
                            self.lineType)

            cv2.imshow("Depth", cv2.resize(img, (win_w, win_h)))
            if (cv2.waitKey(1) & 0xFF == ord('q')):
                break

        self.recordFinishTime = datetime.datetime.now().strftime(
            "%Y-%m-%d-%H-%M-%S-%f")
        self.close_capture_device()
        cv2.destroyAllWindows()
        if self.frameCount > 0:
            print("Writing Started")
            self.dirPath = self.dirPath + '__' + self.recordFinishTime
            print("Writing Finished")
        print("Frame Count : ", self.frameCount)
Example #5
    #parser.add_argument("-d", "--debug", help="run in interactive, debug mode", action="store_true")
    args = parser.parse_args()

    ### OpenNI/NiTE initialization
    openni2.initialize()
    nite2.initialize()

    logger.info("Opening a freenect device...")
    dev = openni2.Device.open_any()
    info = dev.get_device_info()
    logger.info("Device <%s %s> successfully opened." %
                (info.vendor, info.name))

    logger.info("Loading the NiTE user tracker...")
    try:
        userTracker = nite2.UserTracker(dev)
    except utils.NiteError as ne:
        logger.error("Unable to start the NiTE human tracker. Check "
                     "the error messages in the console. Model data "
                     "(s.dat, h.dat...) might be missing.")
        sys.exit(-1)
    logger.info("User tracker loaded.")

    logger.info("Now waiting for humans...")
    #############

    with underworlds.Context("Human tracker") as ctx:

        world = ctx.worlds[args.world]
        nodes = world.scene.nodes
Example #6
                    [0, 0, 0, 1]])
right_tf = np.array([[np.cos(right_theta), 0, -np.sin(right_theta), right_x],
                    [0, 1, 0, right_y],
                    [np.sin(right_theta), 0, np.cos(right_theta), 0],
                    [0, 0, 0, 1]])

device_0_is_left_kinect = True
if device_0_is_left_kinect:
    tf_list = [left_tf, right_tf]
else:
    tf_list = [right_tf, left_tf]  # device 0 is the right Kinect here

try:
    #userTracker = nite2.UserTracker(dev)
    for i in range(0,len(devList)):
        utList.append(nite2.UserTracker(devList[i]))
        
except utils.NiteError as ne:
    logger.error("Unable to start the NiTE human tracker. Check "
                 "the error messages in the console. Model data "
                 "(s.dat, h.dat...) might be inaccessible.")
    sys.exit(-1)

csList=[]
for i in range(0,len(devList)):
    dev = devList[i]
    color_stream = dev.create_color_stream()
    color_stream.start()
    
    csList.append(color_stream)
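The 4x4 matrices above are homogeneous transforms (a rotation about the Y axis plus a translation), presumably used to map each device's joint coordinates into a shared frame. A sketch of applying one to a NiTE joint position, under that assumption:

# Assumption: tf_list[i] maps device i's joint coordinates into a common frame.
import numpy as np


def to_common_frame(tf, joint):
    p = np.array([joint.position.x, joint.position.y, joint.position.z, 1.0])
    return tf.dot(p)[:3]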
Example #7
def pegarMovimentos():
    global player1HandRightx
    global player1HandRighty
    global player1HandLeftx
    global player1HandLefty

    global player2HandRightx
    global player2HandRighty
    global player2HandLefty
    global player2HandLeftx

    global joints

    global leoteste

    #openni2.initialize("/home/leonardo/Downloads/OpenNI-Linux-x64-2.2/Redist")
    openni2.initialize("/home/leonardo/Tcc/OpenNI-Linux-x64-2.2/Redist")
    #nite2.initialize("/home/leonardo/Downloads/NiTE-Linux-x64-2.2/Redist")
    nite2.initialize("/home/leonardo/Tcc/NiTE-2.0.0/Redist")

    dev = openni2.Device.open_any()
    device_info = dev.get_device_info()
    try:
        userTracker = nite2.UserTracker(dev)
    except utils.NiteError as ne:
        print "entrou em exept"
        print(
            "Unable to start the NiTE human tracker. Check the error messages in the console. Model data (s.dat, h.dat...) might be inaccessible."
        )
        print(ne)
        sys.exit(-1)
        print "antes do while"

    while True:
        frame = userTracker.read_frame()
        depth_frame = frame.get_depth_frame()

        if frame.users:
            i = 1
            for user in frame.users:
                user.id = i
                i += 1
                if user.skeleton.state == nite2.SkeletonState.NITE_SKELETON_TRACKED:
                    print(user.id)
                else:
                    print(user.id)
                    print(user.is_new())
                    print(user.is_visible())
                    print("Skeleton state: " + str(user.skeleton.state))
                    userTracker.start_skeleton_tracking(user.id)
                    print("Skeleton state: " + str(user.skeleton.state))
                    preenchermovimento(user.id, user.skeleton.joints)
        else:
            print("No users")

    nite2.unload()
    openni2.unload()
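Example #7 passes the tracked joints to preenchermovimento() ("fill movements"), which is not shown. Given the global hand-position variables declared at the top of the function, it presumably copies each player's hand joints into those globals; a purely hypothetical sketch:

# Hypothetical sketch of preenchermovimento(); the real implementation is not shown.
from primesense import nite2


def preenchermovimento(user_id, joints):
    global player1HandRightx, player1HandRighty, player1HandLeftx, player1HandLefty
    right = joints[nite2.JointType.NITE_JOINT_RIGHT_HAND]
    left = joints[nite2.JointType.NITE_JOINT_LEFT_HAND]
    if user_id == 1:
        player1HandRightx, player1HandRighty = right.position.x, right.position.y
        player1HandLeftx, player1HandLefty = left.position.x, left.position.y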
Example #8
    def __init__(self,
                 use_thread=False,
                 sleep_dt=0.,
                 verbose=False,
                 track_hand=False):
        """
        Initialize the Kinect input interface using the `openni` library.

        Args:
            use_thread (bool): If True, it will run the interface in a separate thread than the main one.
                The interface will update its data automatically.
            sleep_dt (float): If :attr:`use_thread` is True, it will sleep the specified amount before acquiring
                the next sample.
            verbose (bool): If True, it will print information about the state of the interface. What exactly is
                printed is left to the programmer.
            track_hand (bool): If True, it will track the hands.
        """
        # initialize openni2 and nite2; you can give the path to the library as an argument.
        # Otherwise, it will look for OPENNI2_REDIST / OPENNI2_REDIST64 and NITE2_REDIST / NITE2_REDIST64 environment
        # variables.
        openni2.initialize()
        nite2.initialize()

        # open all the devices
        devices = openni2.Device.open_all()

        # get the correct device (Microsoft Kinect)
        self.device = None
        for device in devices:
            info = device.get_device_info()
            if info.vendor == 'Microsoft' and info.name == 'Kinect':  # Kinect Interface
                self.device = device
                break

        # If we didn't find it, raise an error
        if self.device is None:
            devices = [device.get_device_info() for device in devices]
            raise ValueError(
                "No Kinect devices were detected; we found these devices instead: {}"
                .format(devices))

        if verbose:
            print(self.device.get_device_info())

        # create tracker for the hand or user depending on the given parameter
        if track_hand:
            self.tracker = nite2.HandTracker(self.device)
        else:
            self.tracker = nite2.UserTracker(self.device)

        # data
        self.joints = [
            'head', 'neck', 'torso', 'left_shoulder', 'left_elbow',
            'left_hand', 'left_hip', 'left_knee', 'left_foot',
            'right_shoulder', 'right_elbow', 'right_hand', 'right_hip',
            'right_knee', 'right_foot'
        ]
        joint = nite2.JointType
        self.nite_joints = [
            joint.NITE_JOINT_HEAD, joint.NITE_JOINT_NECK,
            joint.NITE_JOINT_TORSO, joint.NITE_JOINT_LEFT_SHOULDER,
            joint.NITE_JOINT_LEFT_ELBOW, joint.NITE_JOINT_LEFT_HAND,
            joint.NITE_JOINT_LEFT_HIP, joint.NITE_JOINT_LEFT_KNEE,
            joint.NITE_JOINT_LEFT_FOOT, joint.NITE_JOINT_RIGHT_SHOULDER,
            joint.NITE_JOINT_RIGHT_ELBOW, joint.NITE_JOINT_RIGHT_HAND,
            joint.NITE_JOINT_RIGHT_HIP, joint.NITE_JOINT_RIGHT_KNEE,
            joint.NITE_JOINT_RIGHT_FOOT
        ]

        self.data = {}

        super(KinectSkeletonTrackingInterface,
              self).__init__(use_thread=use_thread,
                             sleep_dt=sleep_dt,
                             verbose=verbose)
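The constructor above only wires up the tracker and the joint name lists; how self.data gets filled is not shown here. A hedged sketch of what an update step might look like when track_hand is False (the method name and structure are assumptions, not the class's real API):

    # Hypothetical update step: store one (x, y, z) tuple per named joint for
    # the first tracked user. Not part of the original class.
    def _read_skeleton(self):
        frame = self.tracker.read_frame()
        for user in frame.users:
            if user.is_new():
                self.tracker.start_skeleton_tracking(user.id)
            elif user.skeleton.state == nite2.SkeletonState.NITE_SKELETON_TRACKED:
                for name, joint_type in zip(self.joints, self.nite_joints):
                    j = user.skeleton.joints[joint_type]
                    self.data[name] = (j.position.x, j.position.y, j.position.z)
                break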