import math
import random
import numpy
import rospy
from franka_interface import ArmInterface


def randomQ(arm):
    # Sample a random joint configuration within the arm's joint limits.
    (lower, upper) = arm.GetJointLimits()
    dofs = numpy.zeros(len(lower))
    lower[0] = -math.pi / 2.0
    upper[0] = math.pi / 2.0
    for i in range(len(lower)):
        dofs[i] = random.uniform(lower[i], upper[i])
    return dofs


if __name__ == '__main__':
    rospy.init_node("path_testing")
    realarm = ArmInterface()
    q0 = realarm.convertToList(realarm.joint_angles())
    startq = realarm.joint_angles()

    while True:
        # Rather than perturbing the pose, perturb q as a hack
        delta_q = numpy.random.rand(7) * 0.2
        q1_seed = numpy.add(q0, delta_q)

        raw_input("seed")
        realarm.move_to_joint_positions(realarm.convertToDict(q1_seed))
        p1 = realarm.endpoint_pose()
        raw_input("q0")
        realarm.move_to_joint_positions(realarm.convertToDict(q0))
        p0 = realarm.endpoint_pose()

        raw_input("p1")
Example #2
import numpy as np
import rospy
from franka_interface import ArmInterface

if __name__ == '__main__':
    rospy.init_node("test_robot")
    # Create the arm interface instance (see
    # https://justagist.github.io/franka_ros_interface/DOC.html#arminterface
    # for all available methods of the ArmInterface() object).
    r = ArmInterface()
    # Get the controller manager instance associated with the robot (not required in most cases).
    cm = r.get_controller_manager()
    # Get the MoveIt interface for planning and executing trajectories using MoveIt planners (see
    # https://justagist.github.io/franka_ros_interface/DOC.html#franka_moveit.PandaMoveGroupInterface
    # for documentation).
    mvt = r.get_movegroup_interface()

    elapsed_time_ = rospy.Duration(0.0)
    period = rospy.Duration(0.005)

    r.move_to_neutral()  # move robot to neutral pose

    initial_pose = r.joint_angles()  # get current joint angles of the robot

    jac = r.zero_jacobian()  # get end-effector jacobian
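
    # Illustrative (hypothetical) use of the Jacobian, not part of the original demo:
    # the end-effector twist can be estimated from the measured joint velocities as xdot = J * qdot.
    qdot = np.array([r.joint_velocities()[j] for j in r.joint_names()])
    ee_twist = np.asarray(jac).dot(qdot)  # 6-vector: linear velocity (x, y, z), then angular velocity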

    count = 0
    rate = rospy.Rate(1000)

    rospy.loginfo("Commanding...\n")
    joint_names = r.joint_names()
    vals = r.joint_angles()
    while not rospy.is_shutdown():

        elapsed_time_ += period

        delta = 3.14 / 16.0 * (1 - np.cos(3.14 / 5.0 * elapsed_time_.to_sec())) * 0.2
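
        # A minimal sketch of how such a control loop typically continues (an assumption,
        # not from the original snippet): offset each joint around its initial angle by
        # `delta` and stream the resulting position command.
        for j in joint_names:
            vals[j] = initial_pose[j] + delta
        # set_joint_positions_velocities() is the streaming command documented for
        # ArmInterface; using it here is an assumption about the original demo.
        r.set_joint_positions_velocities([vals[j] for j in joint_names], [0.0] * 7)
        rate.sleep()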
    # Stop recording rosbag
    terminate_ros_node("/record")
    rospy.sleep(1)
    #hand.Open() TODO


def zeroFTSensor():
    # Re-zero (tare) the force-torque sensor through its ROS service.
    rospy.wait_for_service('/netft/zero', timeout=0.5)
    zeroSensorRos = rospy.ServiceProxy('/netft/zero', srv.Zero)
    zeroSensorRos()


def terminate_ros_node(s):
    # Kill every running ROS node whose name starts with the given prefix
    # (e.g. "/record" for rosbag recorder nodes).
    list_cmd = subprocess.Popen("rosnode list",
                                shell=True,
                                stdout=subprocess.PIPE)
    list_output = list_cmd.stdout.read().decode()
    retcode = list_cmd.wait()
    assert retcode == 0, "List command returned %d" % retcode
    for node_name in list_output.split("\n"):
        if node_name.startswith(s):
            os.system("rosnode kill " + node_name)


if __name__ == '__main__':
    rospy.init_node("grip_tests")
    realarm = ArmInterface()
    q0 = realarm.joint_angles()

    IPython.embed()
Example #4
import numpy as np
import rospy
import tf
from sensor_msgs.msg import CameraInfo
from franka_interface import ArmInterface


def skew_mat(p):
    # Skew-symmetric (cross-product) matrix of a 3-vector p,
    # i.e. skew_mat(p).dot(q) == np.cross(p, q).
    return np.array([
        [0, -p[2], p[1]],
        [p[2], 0, -p[0]],
        [-p[1], p[0], 0]
    ])
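
# Quick illustrative check (not in the original script; the test vectors are made up):
# the skew matrix reproduces the cross product.
_p = np.array([1.0, 2.0, 3.0])
_q = np.array([0.5, -1.0, 2.0])
assert np.allclose(skew_mat(_p).dot(_q), np.cross(_p, _q))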

if __name__ == '__main__':

    rospy.init_node("reference_governor")

    # Create the robot 
    robot = ArmInterface()
    
    vals_pos = robot.joint_angles()
    vals_vel = robot.joint_velocities()
    joint_names = robot.joint_names()

    rospy.loginfo('RefGov: Moving to neutral')
    robot.move_to_neutral()
    
    # Subscriber to the camera_info topic
    camera_info_msg = rospy.wait_for_message("/d435_color/camera_info", CameraInfo, timeout=None)
    intrinsic = get_intrinsic_param(camera_info_msg)
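    # get_intrinsic_param() is a helper defined elsewhere in the original project.
    # A minimal sketch of what it might do (the return format is an assumption):
    #
    #     def get_intrinsic_param(msg):
    #         # CameraInfo.K is the row-major 3x3 intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1]
    #         K = np.array(msg.K).reshape(3, 3)
    #         return {'fx': K[0, 0], 'fy': K[1, 1], 'cx': K[0, 2], 'cy': K[1, 2]}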
    
    # Transformations

    tf_listener = tf.TransformListener()
    tf_broadcaster = tf.TransformBroadcaster()
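
    # Illustrative use of the listener (the frame names below are assumptions, not
    # taken from the original script): look up the camera pose in the robot base frame.
    try:
        tf_listener.waitForTransform('/panda_link0', '/d435_color_optical_frame',
                                     rospy.Time(0), rospy.Duration(4.0))
        (trans, rot) = tf_listener.lookupTransform('/panda_link0',
                                                   '/d435_color_optical_frame',
                                                   rospy.Time(0))
    except tf.Exception:
        rospy.logwarn('Camera transform not available yet')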
Example #5
import numpy as np
import pb_robot
# load_blocks, PandaAgent, get_grasp_gen, get_free_motion_gen and get_ik_fn are
# project-specific helpers assumed to be imported from the surrounding package.


def main(args):
    NOISE = 0.00005

    # get a bunch of random blocks
    blocks = load_blocks(fname=args.blocks_file, num_blocks=10, remove_ixs=[1])

    agent = PandaAgent(blocks,
                       NOISE,
                       use_platform=False,
                       teleport=False,
                       use_action_server=False,
                       use_vision=False,
                       real=True)
    agent.step_simulation(T=1, vis_frames=True, lifeTime=0.)

    agent.plan()
    fixed = [f for f in agent.fixed if f is not None]
    grasps_fn = get_grasp_gen(agent.robot,
                              add_slanted_grasps=False,
                              add_orthogonal_grasps=True)
    path_planner = get_free_motion_gen(agent.robot, fixed)
    ik_fn = get_ik_fn(agent.robot,
                      fixed,
                      approach_frame='gripper',
                      backoff_frame='global',
                      use_wrist_camera=False)

    from franka_interface import ArmInterface
    arm = ArmInterface()
    #arm.move_to_neutral()
    start_q = arm.convertToList(arm.joint_angles())
    start_q = pb_robot.vobj.BodyConf(agent.robot, start_q)

    body = agent.pddl_blocks[args.id]
    pose = pb_robot.vobj.BodyPose(body, body.get_base_link_pose())
    for g in grasps_fn(body):
        grasp = g[0]
        # Check that the grasp points straight down.
        obj_worldF = pb_robot.geometry.tform_from_pose(pose.pose)
        grasp_worldF = np.dot(obj_worldF, grasp.grasp_objF)
        grasp_worldR = grasp_worldF[:3, :3]

        e_x, e_y, e_z = np.eye(3)
        is_top_grasp = grasp_worldR[:, 2].dot(-e_z) > 0.999

        if not is_top_grasp: continue

        print('Getting IK...')
        approach_q = ik_fn(body, pose, grasp, return_grasp_q=True)[0]
        print('Planning move to path')
        command1 = path_planner(start_q, approach_q)
        print('Planning return home')
        command2 = path_planner(approach_q, start_q)

        agent.execute()
        input('Ready to execute?')
        command1[0][0].simulate(timestep=0.25)
        input('Move back in sim?')
        command2[0][0].simulate(timestep=0.25)

        input('Move to position on real robot?')
        command1[0][0].execute(realRobot=arm)
        input('Reset position on real robot?')
        command2[0][0].execute(realRobot=arm)

        break
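
if __name__ == '__main__':
    # Hypothetical entry point, not part of the original snippet: the script
    # presumably builds `args` with argparse; only the two attributes used in
    # main() (blocks_file and id) are included here.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--blocks-file', dest='blocks_file', type=str, required=True)
    parser.add_argument('--id', type=int, default=0)
    main(parser.parse_args())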