def _evaluate_calibration(self, calibrations):
        def quaternion_cost(norm_coeff):
            C = 0
            for transform in list_calibr:
                # penalize deviation from unit norm (a rotation quaternion has norm 1)
                C += norm_coeff * abs(np.linalg.norm(transform[1]) - 1)
            return C

        def distance_cost(pose1, pose2, rot_coeff=2):
            # calculate the position distance
            pos_cost = np.linalg.norm(np.array(pose1[0]) - np.array(pose2[0]))
            # distance between two quaternions
            rot_cost = 1 - np.inner(pose1[1], pose2[1])**2
            return pos_cost + rot_coeff * rot_cost

        def base_cost(base_pose, coeff=2):
            q = base_pose[1]
            # calculate z axis from quaternion
            z = [
                2 * (q[0] * q[2] + q[1] * q[3]),
                2 * (q[1] * q[2] - q[0] * q[3]),
                1 - 2 * q[0] * q[0] - 2 * q[1] * q[1]
            ]
            # check that the z base axis is normal to the ground
            cost = coeff * abs(np.dot(z, self.ground_axis) - 1)
            return cost

        # first extract the transformations
        list_calibr = self.extract_transforms(calibrations)
        # collect the cost based on the quaternions
        cost = quaternion_cost(0.5)
        # set the base transform
        base_transform = list_calibr[0]
        inv_base = transformations.inverse_transform(base_transform)
        # get pose of the base
        base_pose = transformations.multiply_transform(
            self.recorded_poses[self.human.prefix + '/base'], inv_base)
        # calculate cost of the base
        cost += (10 * base_cost(base_pose))
        # loop through all the transforms
        for i in range(1, len(list_calibr)):
            key = self.keys[i]
            # get the fk
            fk = self.fk[key]
            # compute the corresponding transformation from recorded data
            pose = transformations.multiply_transform(base_transform,
                                                      self.recorded_poses[key])
            pose = transformations.multiply_transform(
                pose, transformations.inverse_transform(list_calibr[i]))
            pose[1] /= np.linalg.norm(pose[1])
            # compute the cost based on the distance
            cost += distance_cost(fk, pose)

        print(cost)
        return cost
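
The cost function above relies on extract_transforms, which is not shown in this listing. A minimal sketch, assuming the layout implied by random_transforms in the calibrate example below (seven parameters per frame: three position values followed by four unnormalized quaternion values):

import numpy as np

def extract_transforms(self, flat_transforms):
    # split the flat optimization vector into one [position, quaternion]
    # pair per calibrated frame (hypothetical reconstruction)
    n_params = 7  # 3 position values + 4 quaternion values
    transforms = []
    for i in range(0, len(flat_transforms), n_params):
        pos = np.array(flat_transforms[i:i + 3])
        rot = np.array(flat_transforms[i + 3:i + 7])
        transforms.append([pos, rot])
    return transforms
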
Example #3
 def pred_positioned(self, master, slave, atp):
     """
     Check whether a constraint between master and slave exists at attach point
     atp, and return True if the measured relative transform is within the
     configured tolerances.
     :param master: name of the master object's TF frame
     :param slave: name of the slave object's TF frame
     :param atp: (int) index of the attach point
     :return: True if predicate POSITIONED(master, slave, atp) is True
     """
     if master + slave + str(atp) in self.attached:
         return True
     try:
         # WARNING: Do not ask the relative tf directly, it is outdated!
         tf_slave = self.tfl.lookupTransform(self.world, slave,
                                             rospy.Time(0))
         tf_master = self.tfl.lookupTransform(self.world, master,
                                              rospy.Time(0))
     except Exception:
         # TF lookup failed; fall through and report False
         pass
     else:
         relative = transformations.multiply_transform(
             transformations.inverse_transform(tf_master), tf_slave)
         constraint = self.poses[master]['constraints'][atp][slave]
         cart_dist = transformations.distance(constraint, relative)
         quat_dist = transformations.distance_quat(constraint, relative)
         return (cart_dist < self.config['positioned']['position_tolerance'] and
                 quat_dist < self.config['positioned']['orientation_tolerance'])
     return False
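
The tolerance keys read above come from a configuration dictionary. A hypothetical fragment consistent with those keys (the values here are illustrative only, not from the source):

config = {
    'positioned': {
        'position_tolerance': 0.05,    # metres
        'orientation_tolerance': 0.3,  # quaternion distance, as in distance_quat
    },
}
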
    def compute_sub_ik(self, group, desired_dict, result, tol=0.001):
        # get the desired pose in the correct base frame
        index = self.links.index(group)
        base = self.bases[index]
        tr = desired_dict[group]
        if base != self.prefix + '/base':
            if base in desired_dict:
                tr_base = desired_dict[base]
                inv_base = transformations.inverse_transform(tr_base)
                desired_pose = transformations.multiply_transform(inv_base, tr)
            else:
                return 1
        else:
            desired_pose = tr

        # transform it back to PoseStamped
        desired_pose = transformations.list_to_pose(desired_pose)
        try:
            # call the srv
            res = self.div_ik_srv[group](desired_poses=[desired_pose], tolerance=tol)
            joints = res.joint_state.position
        except Exception:
            return 1

        with self.lock:
            result[group] = joints
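
A minimal usage sketch (the solver instance is assumed, not shown here): because compute_sub_ik writes into a shared dict under self.lock, each group can be solved in its own thread and the results collected afterwards.

from threading import Thread

result = {}
threads = [Thread(target=solver.compute_sub_ik, args=(group, desired_dict, result))
           for group in desired_dict]
for t in threads:
    t.start()
for t in threads:
    t.join()
# result now maps each group name to its IK joint values
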
Example #5
 def compute_sub_ik(self, group, desired_dict, result, seed, tol=0.1):
     # get the desired pose in the correct base frame
     base = self.links[group]
     tr = desired_dict[group]
     if desired_dict:
         base_found = False
         if base != self.prefix + '/base':
             if base in desired_dict.keys():
                 base_found = True
                 tr_base = desired_dict[base]
             else:
                 # look for the base in already calculated results
                 timeout = 0.25
                 start = time()
                 while not base_found and not rospy.is_shutdown() and (
                         time() - start < timeout):
                     with self.lock:
                         for key, value in result.items():
                             if base in self.model.get_links_chain(
                                     self.links[key], key):
                                 base_found = True
                                 js = JointState()
                                 js.name = value['joint_names']
                                 js.position = value['joint_values']
                                 # call the forward kinematic
                                 fk = self.model.forward_kinematic(
                                     js, links=base)
                                 tr_base = fk[base]
             if base_found:
                 inv_base = transformations.inverse_transform(tr_base)
                 desired_pose = transformations.multiply_transform(
                     inv_base, tr)
             else:
                 return 1
         else:
             desired_pose = tr
     else:
         return 1
     # transform it back to PoseStamped
     desired_pose = transformations.list_to_pose(desired_pose)
     desired_pose.header.frame_id = base
     try:
         # call the srv
         res = self.div_ik_srv[group](desired_poses=[desired_pose],
                                      tolerance=tol,
                                      seed=seed)
     except Exception as e:
         print(e)
         return 1
     joint_state = res.joint_state
     with self.lock:
         result[group] = {
             'joint_names': joint_state.name,
             'joint_values': joint_state.position
         }
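
A possible follow-up step (assumed, not part of the source): merge the per-group entries written into result into a single JointState message.

from sensor_msgs.msg import JointState

merged = JointState()
for group, res in result.items():
    merged.name.extend(res['joint_names'])
    merged.position.extend(res['joint_values'])
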
 def calibration_matrices(self, d):
     mat_dict = {}
     self.calibrated_frames_set = {}
     for sensor, dico in d.items():
         mat_dict[sensor] = {}
         self.calibrated_frames_set[sensor] = set()
         for key, value in dico.items():
             mat_dict[sensor][key] = value
             mat_dict[sensor][key + '/inv'] = transformations.inverse_transform(value)
             self.calibrated_frames_set[sensor].add(key)
     return mat_dict
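
A brief usage sketch (the 'kinect' and 'head' names are hypothetical): the precomputed '/inv' entries save recomputing the inverse each time a transform must be applied in the opposite direction.

mat_dict = node.calibration_matrices(calibration_dict)  # node: the owning instance
tr = mat_dict['kinect']['head']          # calibrated transform for that frame
tr_inv = mat_dict['kinect']['head/inv']  # its precomputed inverse
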
        def correct_base_guess(base_transform):
            if base_transform == 0:
                return False

            inv_base = transformations.inverse_transform(base_transform)
            q = inv_base[1]
            # calculate x base axis from quaternion
            x = [1 - 2 * q[1] * q[1] - 2 * q[2] * q[2],
                 2 * (q[0] * q[1] + q[2] * q[3]),
                 2 * (q[0] * q[2] - q[1] * q[3])]
            # check that the base is facing the robot (cos < 0)
            epsilon = -0.1
            return np.dot(x, self.robot_axis) < epsilon
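
The x-axis expression above is the first column of the rotation matrix built from a quaternion in (x, y, z, w) scalar-last order, the ROS convention. A quick numerical check, assuming a recent SciPy:

import numpy as np
from scipy.spatial.transform import Rotation

q = np.random.uniform(-1, 1, 4)
q /= np.linalg.norm(q)  # (x, y, z, w), scalar-last
x_formula = [1 - 2 * q[1] * q[1] - 2 * q[2] * q[2],
             2 * (q[0] * q[1] + q[2] * q[3]),
             2 * (q[0] * q[2] - q[1] * q[3])]
x_matrix = Rotation.from_quat(q).as_matrix()[:, 0]
assert np.allclose(x_formula, x_matrix)
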
Example #10
 def pred_position(self, master, slave, atp):
     try:
         # WARNING: Do not ask the relative tf directly, it is outdated!
         tf_slave = self.tfl.lookupTransform(self.world, slave,
                                             rospy.Time(0))
         tf_master = self.tfl.lookupTransform(self.world, master,
                                              rospy.Time(0))
     except Exception:
         pass
     else:
         relative = transformations.multiply_transform(
             transformations.inverse_transform(tf_master), tf_slave)
         constraint = self.poses[master]['constraints'][atp][slave]
         cart_dist = transformations.distance(constraint, relative)
         quat_dist = transformations.distance_quat(constraint, relative)
         within_start = (cart_dist < self.config['start_position']['position_tolerance'] and
                         quat_dist < self.config['start_position']['orientation_tolerance'])
         fully_positioned = (cart_dist < self.config['positioned']['position_tolerance'] and
                             quat_dist < self.config['positioned']['orientation_tolerance'])
         # True only in the intermediate zone: positioning has started but the
         # final 'positioned' tolerances are not met yet
         return within_start and not fully_positioned
     return False
Example #11
 def pred_attached(self, master, slave, atp):
     if master + slave + str(atp) in self.attached:
         return True
     elif self.pred_positioned(master, slave, atp):
         try:
             # WARNING: Do not ask the relative tf directly, it is outdated!
             screwdriver = self.tfl.lookupTransform(self.world,
                                                    self.screwdriver,
                                                    rospy.Time(0))
             tf_master = self.tfl.lookupTransform(self.world, master,
                                                  rospy.Time(0))
         except Exception:
             pass
         else:
             if self.screwdriver in self.poses[master]['constraints'][atp]:
                 # For objects that need to be screwed
                 relative = transformations.multiply_transform(
                     transformations.inverse_transform(tf_master),
                     screwdriver)
                 cart_dist = transformations.distance(
                     relative,
                     self.poses[master]['constraints'][atp][self.screwdriver])
                 if cart_dist < self.config['attached']['tool_position_tolerance']:
                     try:
                         if (rospy.Time.now() - self.attaching_stamps[master][slave] >
                                 rospy.Duration(self.config['attached']['screwdriver_attaching_time'])):
                             self.attaching_started.append(master + slave + str(atp))
                     except KeyError:
                         if master not in self.attaching_stamps:
                             self.attaching_stamps[master] = {}
                         self.attaching_stamps[master][slave] = rospy.Time.now()
                 elif master + slave + str(atp) in self.attaching_started:
                     self.attached.append(master + slave + str(atp))
             else:  # For objects that only need to be inserted
                 # self.screwed.append(master+slave+str(atp))
                 self.attached.append(master + slave + str(atp))
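     # NOTE: a newly recorded attachment is reported only on the next call,
     # via the early 'in self.attached' check above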
     return False
Example #12
 def pred_screw(self, master, slave, atp, state):
     if Predicate(type='positioned',
                  parameters=[master, slave, str(atp)]) in state.predicates:
         try:
             # WARNING: Do not ask the relative tf directly, it is outdated!
             screwdriver = self.tfl.lookupTransform(self.world,
                                                    self.screwdriver,
                                                    rospy.Time(0))
             tf_master = self.tfl.lookupTransform(self.world, master,
                                                  rospy.Time(0))
         except Exception:
             pass
         else:
             relative = transformations.multiply_transform(
                 transformations.inverse_transform(tf_master), screwdriver)
             cart_dist = transformations.distance(
                 relative,
                 self.poses[master]['constraints'][atp][self.screwdriver])
             #quat_dist = transformations.distance_quat(relative, self.poses[master]['constraints'][atp][self.screwdriver])
             # Do not measure orientation, since the screwdriver has to spin to screw
             return cart_dist < self.config['attached']['tool_position_tolerance']
     return False
class HumanModel(object):
    def __init__(self,
                 description='human_description',
                 prefix='human',
                 control=False):
        rospack = RosPack()
        self.path = rospack.get_path('human_moveit_config')
        self.description = description
        self.robot_commander = moveit_commander.RobotCommander(description)
        if control:
            self.joint_publisher = rospy.Publisher('/human/set_joint_values',
                                                   JointState,
                                                   queue_size=1)
        self.groups = {}
        self.groups['head'] = moveit_commander.MoveGroupCommander(
            'Head', description)
        self.groups['right_arm'] = moveit_commander.MoveGroupCommander(
            'RightArm', description)
        self.groups['left_arm'] = moveit_commander.MoveGroupCommander(
            'LeftArm', description)
        self.groups['right_leg'] = moveit_commander.MoveGroupCommander(
            'RightLeg', description)
        self.groups['left_leg'] = moveit_commander.MoveGroupCommander(
            'LeftLeg', description)
        self.groups['upper_body'] = moveit_commander.MoveGroupCommander(
            'UpperBody', description)
        self.groups['lower_body'] = moveit_commander.MoveGroupCommander(
            'LowerBody', description)
        self.groups['whole_body'] = moveit_commander.MoveGroupCommander(
            'WholeBody', description)
        # initialize end-effectors dict
        self.end_effectors = {}
        # fill the dict with each group's end-effector link
        for key, value in self.groups.items():
            self.end_effectors[key] = value.get_end_effector_link()
        # add the list of end-effectors for bodies
        self.end_effectors['upper_body'] = [
            self.end_effectors['head'], self.end_effectors['right_arm'],
            self.end_effectors['left_arm']
        ]
        self.end_effectors['lower_body'] = [
            self.end_effectors['right_leg'], self.end_effectors['left_leg']
        ]
        self.end_effectors['whole_body'] = (self.end_effectors['upper_body'] +
                                            self.end_effectors['lower_body'])
        # initialize common links per group
        self.group_links = {}
        self.group_links['head'] = [
            prefix + '/shoulder_center', prefix + '/head'
        ]
        # fill the dict of active joints by link
        self.joint_by_links = {}
        self.joint_by_links[prefix + '/shoulder_center'] = [
            'spine_0', 'spine_1', 'spine_2'
        ]
        self.joint_by_links[prefix + '/head'] = ['neck_0', 'neck_1', 'neck_2']
        sides = ['right', 'left']
        for s in sides:
            self.group_links[s + '_arm'] = [
                prefix + '/' + s + '_shoulder', prefix + '/' + s + '_elbow',
                prefix + '/' + s + '_hand'
            ]
            self.group_links[s + '_leg'] = [
                prefix + '/' + s + '_hip', prefix + '/' + s + '_knee',
                prefix + '/' + s + '_foot'
            ]
            # arm links
            self.joint_by_links[prefix + '/' + s + '_shoulder'] = [
                s + '_shoulder_0', s + '_shoulder_1', s + '_shoulder_2'
            ]
            self.joint_by_links[prefix + '/' + s +
                                '_elbow'] = [s + '_elbow_0', s + '_elbow_1']
            self.joint_by_links[prefix + '/' + s +
                                '_hand'] = [s + '_wrist_0', s + '_wrist_1']
            # leg links
            self.joint_by_links[prefix + '/' + s + '_hip'] = [
                s + '_hip_0', s + '_hip_1', s + '_hip_2'
            ]
            self.joint_by_links[prefix + '/' + s + '_knee'] = [s + '_knee']
            self.joint_by_links[prefix + '/' + s +
                                '_foot'] = [s + '_ankle_0', s + '_ankle_1']
        self.prefix = prefix

        rospy.wait_for_service('compute_fk')
        self.compute_fk = rospy.ServiceProxy('compute_fk', GetPositionFK)
        self.current_state = self.get_initial_state()

    def init_nearest_neighbour_trees(self):
        def load_database(link):
            database = np.load(
                join(self.path, 'database8', 'database_' + link + '.npz'))
            data_dict = {}
            data_dict['data'] = database
            if self.with_orient:
                data_dict['tree'] = cKDTree(database['data'][:, -7:])
            else:
                data_dict['tree'] = cKDTree(database['data'][:, -7:-4])
            return data_dict

        self.trees = {}
        links = ['head', 'shoulder_center']
        for s in ['right', 'left']:
            links += [s + '_elbow', s + '_hand']
        for link in links:
            self.trees[link] = load_database(link)
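
A usage sketch from inside the class (the query values are made up): find the nearest recorded posture for a desired link position. When with_orient is set the tree was built over position and orientation, so the query would need 7 values instead of 3.

import numpy as np

query = np.array([0.3, 0.2, 1.1])  # desired position of the link
dist, idx = self.trees['right_hand']['tree'].query(query)
nearest_row = self.trees['right_hand']['data']['data'][idx]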

    def get_group_of_link(self, link):
        for key, value in self.group_links.items():
            if link in value:
                return key
        return None

    def extract_group_joints(self, group_name, joint_state):
        active = self.groups[group_name].get_active_joints()
        res = []
        for joint in active:
            index = joint_state.name.index(joint)
            res.append(joint_state.position[index])
        return res

    def forward_kinematic(self, joint_state, base='base', links=None):
        def compute_fk_client():
            try:
                header = Header()
                header.stamp = rospy.Time.now()
                header.frame_id = self.prefix + '/base'
                rs = RobotState()
                rs.joint_state = joint_state
                res = self.compute_fk(header, links, rs)
                return res.pose_stamped
            except rospy.ServiceException as e:
                print("Service call failed: %s" % e)
                # in case of failure return an empty list
                return []

        if links is None:
            links = self.end_effectors['whole_body']
        if not isinstance(links, list):
            if links == "all":
                links = self.get_link_names('whole_body')
            else:
                links = [links]
        # check that the base is in links
        if base != 'base' and base not in links:
            links.append(base)
        pose_stamped_list = compute_fk_client()
        if not pose_stamped_list:
            return {}
        # transform it in a dict of poses
        pose_dict = {}
        if base != 'base':
            tr_base = transformations.pose_to_list(
                pose_stamped_list[links.index(base)].pose)
            inv_base = transformations.inverse_transform(tr_base)
            for i in range(len(links)):
                if links[i] != base:
                    tr = transformations.pose_to_list(
                        pose_stamped_list[i].pose)
                    pose_dict[links[i]] = transformations.multiply_transform(
                        inv_base, tr)
        else:
            for i in range(len(links)):
                pose_dict[links[i]] = transformations.pose_to_list(
                    pose_stamped_list[i].pose)
        return pose_dict
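
A usage sketch (assumes the MoveIt groups and the compute_fk service above are available): compute the pose of a single link for the model's initial state.

human = HumanModel()
js = human.get_initial_state()
# pose of the head expressed in the model's base frame
poses = human.forward_kinematic(js, links=human.prefix + '/head')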
class HumanModel(object):
    def __init__(self,
                 description='human_description',
                 prefix='human',
                 control=False):
        self.description = description
        self.robot_commander = RobotCommander(description)
        if control:
            self.joint_publisher = rospy.Publisher('/human/set_joint_values',
                                                   JointState,
                                                   queue_size=1)
        self.groups = {}
        self.groups['head'] = MoveGroupCommander('Head', description)
        self.groups['right_arm'] = MoveGroupCommander('RightArm', description)
        self.groups['left_arm'] = MoveGroupCommander('LeftArm', description)
        self.groups['right_leg'] = MoveGroupCommander('RightLeg', description)
        self.groups['left_leg'] = MoveGroupCommander('LeftLeg', description)
        self.groups['upper_body'] = MoveGroupCommander('UpperBody',
                                                       description)
        self.groups['lower_body'] = MoveGroupCommander('LowerBody',
                                                       description)
        self.groups['whole_body'] = MoveGroupCommander('WholeBody',
                                                       description)
        # initialize end-effectors dict
        self.end_effectors = {}
        # fill the dict with each group's end-effector link
        for key, value in self.groups.items():
            self.end_effectors[key] = value.get_end_effector_link()
        # add the list of end-effectors for bodies
        self.end_effectors['upper_body'] = [
            self.end_effectors['head'], self.end_effectors['right_arm'],
            self.end_effectors['left_arm']
        ]
        self.end_effectors['lower_body'] = [
            self.end_effectors['right_leg'], self.end_effectors['left_leg']
        ]
        self.end_effectors['whole_body'] = (self.end_effectors['upper_body'] +
                                            self.end_effectors['lower_body'])
        self.prefix = prefix
        self.urdf_reader = URDFReader()

        rospy.wait_for_service('compute_fk')
        self.compute_fk = rospy.ServiceProxy('compute_fk', GetPositionFK)
        self.current_state = self.get_initial_state()

    def find_common_root(self, link1, link2):
        return self.urdf_reader.find_common_root(link1, link2)

    def get_links_chain(self, from_link, to_link):
        return self.urdf_reader.get_links_chain(from_link, to_link)

    def get_joints_chain(self, from_link, to_link):
        return self.urdf_reader.get_joints_chain(from_link, to_link)

    def extract_group_joints(self, group_name, joint_state):
        active = self.groups[group_name].get_active_joints()
        res = []
        for joint in active:
            index = joint_state.name.index(joint)
            res.append(joint_state.position[index])
        return res

    def forward_kinematic(self, joint_state, base='base', links=None):
        def compute_fk_client():
            try:
                header = Header()
                header.stamp = rospy.Time.now()
                header.frame_id = self.prefix + '/base'
                rs = RobotState()
                rs.joint_state = joint_state
                res = self.compute_fk(header, links, rs)
                return res.pose_stamped
            except rospy.ServiceException as e:
                print("Service call failed: %s" % e)
                # in case of failure return an empty list
                return []

        if links is None:
            links = self.end_effectors['whole_body']
        if not isinstance(links, list):
            if links == "all":
                links = self.get_link_names('whole_body')
            else:
                links = [links]
        # check that the base is in links
        if base != 'base' and base not in links:
            links.append(base)
        pose_stamped_list = compute_fk_client()
        if not pose_stamped_list:
            return {}
        # transform it in a dict of poses
        pose_dict = {}
        if base != 'base':
            tr_base = transformations.pose_to_list(
                pose_stamped_list[links.index(base)].pose)
            inv_base = transformations.inverse_transform(tr_base)
            for i in range(len(links)):
                if links[i] != base:
                    tr = transformations.pose_to_list(
                        pose_stamped_list[i].pose)
                    pose_dict[links[i]] = transformations.multiply_transform(
                        inv_base, tr)
        else:
            for i in range(len(links)):
                pose_dict[links[i]] = transformations.pose_to_list(
                    pose_stamped_list[i].pose)
        return pose_dict
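
This variant delegates kinematic-chain queries to a URDFReader; these are the calls compute_sub_ik relies on when it searches already-solved groups for a usable base frame. A sketch (the link names are assumed):

model = HumanModel()
chain = model.get_links_chain(model.prefix + '/base',
                              model.prefix + '/right_hand')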
    def calibrate(self, record, frames='all', maxiter=1000):
        def random_transforms(pos_bounds, rot_bounds):
            flat_transforms = []
            for key in self.keys:
                # add a random position within bounds
                # flat_transforms += np.random.uniform(pos_bounds[0], pos_bounds[1], 3).tolist()
                flat_transforms += [0, 0, 0]
                # add a random quaternion
                rot = np.random.uniform(rot_bounds[0], rot_bounds[1], 4)
                flat_transforms += (rot / np.linalg.norm(rot)).tolist()
            return flat_transforms

        def correct_base_guess(base_transform):
            if base_transform == 0:
                return False

            inv_base = transformations.inverse_transform(base_transform)
            q = inv_base[1]
            # calculate x base axis from quaternion
            x = [
                1 - 2 * q[1] * q[1] - 2 * q[2] * q[2],
                2 * (q[0] * q[1] + q[2] * q[3]),
                2 * (q[0] * q[2] - q[1] * q[3])
            ]
            # check that the base is facing the robot (cos < 0)
            epsilon = -0.1
            return np.dot(x, self.robot_axis) < epsilon

        self.recorded_poses = record
        # calculate the fk of the human model in T pose
        js = self.human.get_initial_state()
        self.fk = self.human.forward_kinematic(js, links='all')
        if frames == 'all':
            self.keys = list(record.keys())
            # remove the base frame so it can be reinserted in first position
            self.keys.remove(self.human.prefix + '/base')
        else:
            self.keys = frames
        self.keys = [self.human.prefix + '/base'] + self.keys
        # set limits for search space
        bounds = []
        pos_bounds = [-0.1, 0.1]
        rot_bounds = [-1, 1]
        for key in self.keys:
            if key == self.human.prefix + '/base':
                bounds.append([0.05, 0.2])
                bounds.append(pos_bounds)
                bounds.append([-0.01, 0.01])
            elif key == self.human.prefix + '/shoulder_center':
                bounds.append([0.05, 0.2])
                bounds.append(pos_bounds)
                bounds.append(pos_bounds)
            elif key == self.human.prefix + '/head':
                bounds.append([0.05, 0.2])
                bounds.append(pos_bounds)
                bounds.append([0.05, 0.2])
            else:
                for i in range(3):
                    bounds.append(pos_bounds)
            for i in range(4):
                bounds.append(rot_bounds)
        # collect initial guess
        base_guess = 0
        while not correct_base_guess(base_guess):
            initial_calibr = random_transforms(pos_bounds, rot_bounds)
            base_guess = [initial_calibr[0:3], initial_calibr[3:7]]

        # find the optimized calibration
        res = opti.minimize(self._evaluate_calibration,
                            initial_calibr,
                            bounds=bounds,
                            method='L-BFGS-B')
        # options={'maxfun': maxiter})

        # extract the list of transformations
        list_transforms = self.extract_transforms(res.x)
        res_calibr = {}
        for i in range(len(self.keys)):
            pos = list_transforms[i][0].tolist()
            rot = list_transforms[i][1].tolist()
            inv_trans = transformations.inverse_transform([pos, rot])
            res_calibr[self.keys[i]] = [list(inv_trans[0]), list(inv_trans[1])]
        print(res)
        return res_calibr
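
A usage sketch (frame names and the calibrator instance are assumed): record maps each tracked frame to a [position, quaternion] pair captured while the subject holds the T pose.

record = {
    'human/base': [[0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 1.0]],
    'human/head': [[0.0, 0.0, 1.6], [0.0, 0.0, 0.0, 1.0]],
}
res_calibr = calibrator.calibrate(record, frames=['human/head'])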