import cv2
import numpy as np
from shutil import copyfile

# assumed import path: the seg-hand-depth helpers used throughout these
# scripts; adjust to wherever they live in your checkout
from seg_hand_depth import seg_hand_depth, seg_hand_with_label


def func(f):
    # f = (src_path, label, dst_path)
    img = cv2.imread(f[0], cv2.IMREAD_ANYDEPTH).astype(np.float32)
    output, label, _ = seg_hand_depth(img, label=f[1])
    cv2.imwrite(f[2], output)
    np.save('{}.npy'.format(f[2][:-4]), label)
    # copyfile(f[0], f[2])
    print(f[2])
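

# Usage sketch (an assumption, not from the original scripts): func takes a
# (src_path, label, dst_path) tuple, which suggests it is meant to be mapped
# over a dataset in parallel. The directory layout and label files below are
# hypothetical placeholders.
if __name__ == '__main__':
    import glob
    from multiprocessing import Pool

    srcs = sorted(glob.glob('raw_depth/*.png'))   # hypothetical layout
    jobs = [(s, np.load(s[:-4] + '_label.npy'),   # hypothetical label files
             s.replace('raw_depth', 'seg_depth')) for s in srcs]
    pool = Pool(8)
    pool.map(func, jobs)
    pool.close()
    pool.join()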


def func2(f):
    # f = (src_path, dst_path): copy the raw frame, then segment it in place
    copyfile(f[0], f[1])
    img = cv2.imread(f[1], cv2.IMREAD_ANYDEPTH).astype(np.float32)
    # an all-zero frame means the depth capture failed, so skip it
    if np.max(img) == np.min(img) == 0:
        print(f[1], 'bad!')
        return
    try:
        output = seg_hand_depth(img, 500, 1000, 10, 100, 4, 4, 250, True, 300)
        cv2.imwrite(f[1], output)
    except Exception:
        print(f[1])


def func2(f):
    # labeled variant: f = (src_path, dst_path, label); also saves the
    # transformed label and the crop metadata from both segmentation passes
    copyfile(f[0], f[1])
    img = cv2.imread(f[1], cv2.IMREAD_ANYDEPTH).astype(np.float32)
    if np.max(img) == np.min(img) == 0:
        print(f[1], 'bad!')
        return
    try:
        output, label, crop_data1 = seg_hand_with_label(img, f[2])
        output, label, crop_data2 = seg_hand_depth(
            output, 500, 1000, 10, 100, 4, 4, 250, True, 300, label=label)
        cv2.imwrite(f[1], output)
        np.save(f[1][:-4] + '.npy', label)
        np.save(f[1][:-4] + '_crop1.npy', crop_data1)
        np.save(f[1][:-4] + '_crop2.npy', crop_data2)
    except Exception:
        print(f[1])
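

# Loading sketch (an assumption): func2 above leaves a segmented image next to
# a matching .npy label, so a training sample can be read back like this.
def load_sample(img_path):
    # read the segmented depth crop and the label saved alongside it
    img = cv2.imread(img_path, cv2.IMREAD_ANYDEPTH).astype(np.float32)
    label = np.load(img_path[:-4] + '.npy')
    return img, label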
    def online_once(self):
        while True:
            img_data = rospy.wait_for_message("/camera/depth/image_raw", Image)
            rospy.loginfo("Got an image ^_^")
            try:
                img = self.bridge.imgmsg_to_cv2(img_data,
                                                desired_encoding="passthrough")
            except CvBridgeError as e:
                rospy.logerr(e)
                continue

            try:
                # preprocess: segment the hand from the depth frame, then
                # scale the 8-bit result from [0, 255] to [-1, 1]
                img = seg_hand_depth(img, 500, 1000, 10, 100, 4, 4, 250, True,
                                     300)
                img = img.astype(np.float32)
                img = img / 255. * 2. - 1

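                # visualization only: the upscaled, min-max normalized copy
                # feeds the imshow preview; the network consumes the [-1, 1]
                # image above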
                n = cv2.resize(img, (0, 0), fx=2, fy=2)
                n1 = cv2.normalize(n,
                                   n,
                                   alpha=0,
                                   beta=1,
                                   norm_type=cv2.NORM_MINMAX,
                                   dtype=cv2.CV_32F)
                cv2.imshow("segmented human hand", n1)
                cv2.waitKey(1)

                # get the clipped joints
                goal = self.joint_cal(img, isbio=True)
                start = self.mgi.get_current_joint_values()

                # collision check and manipulate
                csl_client = rospy.ServiceProxy('CheckSelfCollision',
                                                checkSelfCollision)
                try:
                    shadow_pos = csl_client(start, tuple(goal))
                    if shadow_pos.result:
                        rospy.loginfo("Move Done!")
                    else:
                        rospy.logwarn("Failed to move!")
                except rospy.ServiceException as exc:
                    rospy.logwarn("Service did not process request: " +
                                  str(exc))

                rospy.loginfo("Next one please ---->")
            except Exception:
                rospy.loginfo("no images")
    def online_once(self):
        img_data = rospy.wait_for_message(
            "/camera/aligned_depth_to_color/image_raw", Image)
        rospy.loginfo("Got an image ^_^")
        try:
            img = self.bridge.imgmsg_to_cv2(img_data,
                                            desired_encoding="passthrough")
        except CvBridgeError as e:
            rospy.logerr(e)
            return

        #frames = self.pipeline.wait_for_frames()
        #depth_frame = frames.get_depth_frame()
        #if not depth_frame:
        #    return
        #rospy.loginfo("Got an image from realsense SDK ^_^")
        #img = np.asanyarray(depth_frame.get_data())

        try:
            # preprocess: segment the hand, then scale to [-1, 1]
            img = seg_hand_depth(img, 500, 1000, 10, 100, 4, 4, 250, True, 300)
            img = img.astype(np.float32)
            img = img / 255. * 2. - 1

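            # resize the crop to the 100x100 network input; the other
            # online_once variants presumably rely on seg_hand_depth already
            # returning this size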
            img = cv2.resize(img, (100, 100)).astype(np.float32)
            n = cv2.resize(img, (0, 0), fx=2, fy=2)
            n1 = cv2.normalize(n,
                               n,
                               alpha=0,
                               beta=1,
                               norm_type=cv2.NORM_MINMAX,
                               dtype=cv2.CV_32F)
            cv2.imshow("segmented human hand", n1)
            cv2.waitKey(1)

            # get the clipped joints
            goal = tuple(self.joint_cal(img, isbio=True))

            # only first finger
            start = self.mgi.get_current_joint_values()
            hand_pos = copy.deepcopy(start)
            hand_pos[4] = goal[4]
            hand_pos[3] = goal[3]
            hand_pos[2] = goal[2]
            #print(goal[4])
            #self.mgi.set_joint_value_target(hand_pos)
            #self.mgi.go()

            # interpolate motion trajectories, do collision check then manipulate
            csl_client = rospy.ServiceProxy('CheckSelfCollision',
                                            checkSelfCollision)
            try:
                shadow_pos = csl_client(start, hand_pos)
                if shadow_pos.result:
                    rospy.loginfo("Move Done!")
                else:
                    rospy.logwarn("Failed to move!")
            except rospy.ServiceException as exc:
                rospy.logwarn("Service did not process request: " + str(exc))

            rospy.loginfo("Next one please ---->")
        except Exception:
            rospy.loginfo("no images")
    def online_once(self):
        while True:
            img_data = rospy.wait_for_message(
                "/camera/aligned_depth_to_color/image_raw", Image)
            rospy.loginfo("Got an image ^_^")
            try:
                img = self.bridge.imgmsg_to_cv2(img_data,
                                                desired_encoding="passthrough")
            except CvBridgeError as e:
                rospy.logerr(e)
                continue

            # a finger is "fixed" (left out of tracking) when it is not
            # expected to make contact
            ff_fixed = not self.contact_finger[0]
            mf_fixed = not self.contact_finger[1]
            rf_fixed = not self.contact_finger[2]
            lf_fixed = not self.contact_finger[3]
            th_fixed = not self.contact_finger[4]

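            # track until every finger is fixed, i.e. has either made contact
            # (tactile value above its zero-force threshold) or was excluded
            # from tracking above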
            while not (ff_fixed and lf_fixed and rf_fixed and mf_fixed
                       and th_fixed):
                if self.tactile_values['FF'] > self.force_zero['FF']:
                    self.ff_contacted = True
                if self.tactile_values['MF'] > self.force_zero['MF']:
                    self.mf_contacted = True
                if self.tactile_values['RF'] > self.force_zero['RF']:
                    self.rf_contacted = True
                if self.tactile_values['LF'] > self.force_zero['LF']:
                    self.lf_contacted = True
                if self.tactile_values['TH'] > self.force_zero['TH']:
                    self.th_contacted = True

                # preprocess: segment the hand, then scale to [-1, 1]
                img = seg_hand_depth(img, 500, 1000, 10, 100, 4, 4, 250, True,
                                     300)
                img = img.astype(np.float32)
                img = img / 255. * 2. - 1

                n = cv2.resize(img, (0, 0), fx=2, fy=2)
                n1 = cv2.normalize(n,
                                   n,
                                   alpha=0,
                                   beta=1,
                                   norm_type=cv2.NORM_MINMAX,
                                   dtype=cv2.CV_32F)
                cv2.imshow("segmented human hand", n1)
                cv2.waitKey(1)

                # get the clipped joints
                goal = self.joint_cal(img, isbio=True)
                start = self.mgi.get_current_joint_values()

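                # joint layout assumed by the slices below: [0:2] wrist (kept
                # from the current pose), [2:6] FF, [6:11] LF, [11:15] MF,
                # [15:19] RF, [19:] TH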
                group_variable_values = \
                    self.hand_group.get_current_joint_values()
                if (not self.ff_contacted) and (not ff_fixed):
                    ff_values = goal[2:6]
                else:
                    ff_values = group_variable_values[2:6]
                    ff_fixed = True

                if (not self.lf_contacted) and (not lf_fixed):
                    lf_values = goal[6:11]
                else:
                    lf_values = group_variable_values[6:11]
                    lf_fixed = True

                if (not self.mf_contacted) and (not mf_fixed):
                    mf_values = goal[11:15]
                else:
                    mf_values = group_variable_values[11:15]
                    mf_fixed = True

                if (not self.rf_contacted) and (not rf_fixed):
                    rf_values = goal[15:19]
                else:
                    rf_values = group_variable_values[15:19]
                    rf_fixed = True

                if (not self.th_contacted) and (not th_fixed):
                    th_values = goal[19:]
                else:
                    th_values = group_variable_values[19:]
                    th_fixed = True

                updated_variable_values = (group_variable_values[0:2] +
                                           ff_values + lf_values + mf_values +
                                           rf_values + th_values)

                try:
                    shadow_pos = self.csl_client(
                        start, tuple(updated_variable_values))
                    if shadow_pos.result:
                        rospy.loginfo("Move Done!")
                    else:
                        rospy.logwarn("Failed to move!")
                except rospy.ServiceException as exc:
                    rospy.logwarn("Service did not process request: " +
                                  str(exc))

                rospy.loginfo("Next one please ---->")

            self.change_controller()
            rospy.sleep(1)
            rospy.set_param('start_tactile_feedback', "True")
            sys.exit()
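
# Driver sketch (an assumption, not from these files): each online_once
# variant above belongs to a teleoperation node class; such a node would be
# spun up roughly like this. `Teleoperation` is a hypothetical class name.
# if __name__ == '__main__':
#     rospy.init_node('teleop_online')
#     teleop = Teleoperation()
#     while not rospy.is_shutdown():
#         teleop.online_once()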


def func(f):
    # f = (src_path, dst_path): segment without labels
    img = cv2.imread(f[0], cv2.IMREAD_ANYDEPTH).astype(np.float32)
    output = seg_hand_depth(img)
    cv2.imwrite(f[1], output)
    # copyfile(f[0], f[1])
    print(f[1])
    def online_once(self):
        while True:
            # img_data = rospy.wait_for_message("/camera/depth/image_raw", Image)
            img_data = rospy.wait_for_message(
                "/camera/aligned_depth_to_color/image_raw", Image)
            rospy.loginfo("Got an image ^_^")
            try:
                img = self.bridge.imgmsg_to_cv2(img_data,
                                                desired_encoding="passthrough")
            except CvBridgeError as e:
                rospy.logerr(e)
                continue

            try:
                # preprocess: segment the hand, then scale to [-1, 1]
                img = seg_hand_depth(img, 500, 1000, 10, 100, 4, 4, 250, True,
                                     300)
                img = img.astype(np.float32)
                img = img / 255. * 2. - 1
                print(img.shape)  # debug: confirm the crop size

                n = cv2.resize(img, (0, 0), fx=2, fy=2)
                n1 = cv2.normalize(n,
                                   n,
                                   alpha=0,
                                   beta=1,
                                   norm_type=cv2.NORM_MINMAX,
                                   dtype=cv2.CV_32F)
                cv2.imshow("segmented human hand", n1)
                cv2.waitKey(1)

                # get the clipped joints
                goal = tuple(self.joint_cal(img, isbio=True))

                # gotcha: get_joints_position() returns joints in radians
                hand_pos = self.hand_commander.get_joints_position()

                # first finger
                hand_pos.update({"rh_FFJ3": goal[3]})
                #hand_pos.update({"rh_FFJ2": goal[4]})
                # hand_pos.update({"rh_FFJ4": goal[2]})

                # middle finger
                hand_pos.update({"rh_MFJ3": goal[12]})
                #hand_pos.update({"rh_MFJ2": goal[13]})

                # ring finger
                hand_pos.update({"rh_RFJ3": goal[16]})
                #hand_pos.update({"rh_RFJ2": goal[17]})

                # little finger
                hand_pos.update({"rh_LFJ3": goal[8]})
                #hand_pos.update({"rh_LFJ2": goal[9]})

                # thumb
                #hand_pos.update({"rh_THJ5": goal[19]})
                #hand_pos.update({"rh_THJ4": goal[20]})
                #hand_pos.update({"rh_THJ3": goal[21]})
                #hand_pos.update({"rh_THJ2": goal[22]})

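                # send the target straight to the driver: 0.3 s execution
                # time, non-blocking, angles in radians (the _unsafe variant
                # skips MoveIt planning, hence the disabled collision check
                # below)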
                self.hand_commander.move_to_joint_value_target_unsafe(
                    hand_pos, 0.3, False, angle_degrees=False)
                #rospy.sleep(0.5)

                # collision check and manipulate
                # csl_client = rospy.ServiceProxy('CheckSelfCollision', checkSelfCollision)
                # try:
                #     shadow_pos = csl_client(start, goal)
                #     if shadow_pos.result:
                #         rospy.loginfo("Move Done!")
                #     else:
                #        rospy.logwarn("Failed to move!")
                # except rospy.ServiceException as exc:
                #    rospy.logwarn("Service did not process request: " + str(exc))

                rospy.loginfo("Next one please ---->")
            except Exception:
                rospy.loginfo("no images")