import rospy
from tqdm import tqdm
from panda_robot import PandaArm
# NOTE: CameraApriltagCalibration comes from this project's calibration
# utilities; its import path is not shown in this fragment.


def kuka_calibration():
    rospy.init_node('panda_camera_calibration')
    sim = rospy.get_param('~sim', default=True)

    # Initialize the robot abstraction
    p = PandaArm()
    # med.connect()
    # med.set_control_mode(ControlMode.JOINT_POSITION, vel=0.1)

    # Initialize the calibration
    calibrator = CameraApriltagCalibration(tag_id=0,
                                           calibration_frame_name='calibration_frame',
                                           parent_frame_name='med_base')

    ## TODO: Change these calibration configurations ##
    calibration_configurations_base = [
        [0.552, 0.614, 0.971, -1.57, 2.298, -1.432, -2.538],
        [0.149, 0.796, 0.516, -1.567, 1.798, -0.562, -2.243],
        [0.039, 1.21, 0.234, -0.847, 1.71, -0.371, -1.913],
        [-0.26, 0.512, 0.408, -1.399, 2.381, -0.663, -2.535],
        [-0.389, 0.587, -0.002, -1.57, 2.394, -1.04, -1.535],
        [-0.389, 0.587, -0.002, -1.57, 2.394, -1.04, -1.535],
        [-0.655, 0.823, 0.059, -1.568, 2.278, -0.833, -1.563]
    ]  # poses at which to take measurements

    # expand each base pose with a sweep of wrist-joint values
    wrist_joint_values = [-2.5, -1.5, 0]
    calibration_configurations = []
    for cal_conf_base in calibration_configurations_base:
        for wrist_joint_v in wrist_joint_values:
            cc_i = cal_conf_base[:-1] + [wrist_joint_v]
            calibration_configurations.append(cc_i)
    # calibration_configurations = calibration_configurations_base

    for i, calibration_conf in enumerate(tqdm(calibration_configurations,
                                              desc="Calibrating Cameras")):
        # med.plan_to_pose(med.arm_group, med., target_position=target_position)
        # print('calibration pose {}/{}'.format(i, len(calibration_configurations) - 1))
        # med.plan_to_joint_config(med.arm_group, calibration_conf)
        p.move_to_joint_position(calibration_conf)
        calibrator.take_mesurement()

    # move the robot back to the default configuration
    p.move_to_joint_position([0, 0, 0, 0, 0, 0, 0])

    # compute the camera position and broadcast it
    calibrator.broadcast_tfs()
    rospy.spin()
import sys
import numpy as np
import rospy
import moveit_commander
import franka_interface
from panda_robot import PandaArm
# NOTE: combined_RT, object_server and densefusion_docker come from this
# project's utility modules; their import paths are not shown in this fragment.

# rotation taking the camera frame to the gripper frame
camera_rot_to_gripper = np.array([[1, 0, 0],
                                  [0, -1, 0],
                                  [0, 0, -1]])
# rotation taking the image frame to the camera frame
image_to_camera = np.array([[0, 0, 1],
                            [-1, 0, 0],
                            [0, -1, 0]])

# camera-to-gripper extrinsic
# offset_rotation and offset_transition are assumed to be defined elsewhere
# in the original file.
extrinsic = combined_RT(image_to_camera, np.array([0.0, 0.0, 0.0]))
extrinsic = np.matmul(offset_rotation, extrinsic)
extrinsic = np.matmul(offset_transition, extrinsic)
camera_to_gripper = np.matmul(
    combined_RT(camera_rot_to_gripper, np.array([0.0, 0.0, 0.0])), extrinsic)

moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('move_group_python_interface_tutorial', anonymous=True)
scene = moveit_commander.PlanningSceneInterface()

robot_arm = PandaArm()
robot_gripper = franka_interface.GripperInterface()
obj_server = object_server()
pose_estimator = densefusion_docker()

if __name__ == '__main__':
    for i in range(10):
        obj_server.attach_object("kinect_ros_0", 0.05, -0.05, 0.0)

    # init robot arm state
    robot_arm.move_to_neutral()
    robot_arm.get_gripper().home_joints()
    robot_arm.get_gripper().open()
import rospy
# import tf
# import numpy as np
# import quaternion
from panda_robot import PandaArm
# from franka_robot import franka_kinematics
# from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import WrenchStamped

if __name__ == '__main__':
    rospy.init_node('test')
    r = PandaArm()

    wrench1 = WrenchStamped()
    wrench1.header.frame_id = "panda_link0"
    wrench_pub = rospy.Publisher("ee_wrench", WrenchStamped, queue_size=1)

    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        if not r.in_safe_state():
            print(r.get_robot_status())
            if r.error_in_current_state():
                print(r.what_errors())
            break
        print("-----------------")
        print("")
        print("")
        wrench = r.tip_state()
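        # --- Completion sketch (added; not in the original fragment) ---
        # Populates and publishes the WrenchStamped message set up above. The
        # 'force'/'torque' keys are an assumption about what tip_state()
        # returns; adjust to the actual interface.
        wrench1.header.stamp = rospy.Time.now()
        (wrench1.wrench.force.x, wrench1.wrench.force.y,
         wrench1.wrench.force.z) = wrench['force']
        (wrench1.wrench.torque.x, wrench1.wrench.torque.y,
         wrench1.wrench.torque.z) = wrench['torque']
        wrench_pub.publish(wrench1)
        rate.sleep()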
import time
import numpy as np
import quaternion  # numpy-quaternion
# Quaternion below is assumed to be pyquaternion's class.
from pyquaternion import Quaternion
from panda_robot import PandaArm


def combined_RT(rvec, tvec, inv=0):
    """Stack a 3x3 rotation and a 3-vector translation into a 4x4 homogeneous
    transform. The `inv` flag is accepted but unused in this fragment."""
    trivial_row = np.array([0, 0, 0, 1]).reshape((1, 4))
    extrinsic = np.append(rvec, tvec.reshape((3, 1)), axis=1)
    extrinsic = np.append(extrinsic, trivial_row, axis=0)
    return extrinsic


if __name__ == '__main__':
    # camera frame rotated into the gripper frame
    rr = np.array([[1, 0, 0],
                   [0, -1, 0],
                   [0, 0, -1]])
    robot = PandaArm()

    # offset_rotation and offset_transition are assumed to be defined
    # elsewhere in the original file.
    cam_to_gripper_extrinsic = combined_RT(offset_rotation, np.array([0.0, 0.0, 0.0]))
    cam_to_gripper_extrinsic = np.matmul(offset_transition, cam_to_gripper_extrinsic)
    cam_to_gripper_extrinsic = np.matmul(
        combined_RT(rr, np.array([0.0, 0.0, 0.0])), cam_to_gripper_extrinsic)

    while True:
        # time.sleep(0.0000001)
        # print(robot.ee_pose()[1][1])
        q = quaternion.as_float_array(robot.ee_pose()[1])
        q = Quaternion(q)
        tvec = robot.ee_pose()[0]
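
# --- Separate usage sketch for combined_RT (added; not in the original file) ---
# A homogeneous transform built from a rotation R and a translation t maps a
# homogeneous point p as T @ p; inverting T with np.linalg.inv gives the round
# trip. R_demo, t_demo and p_demo are illustrative values only.
def _combined_RT_demo():
    R_demo = np.eye(3)
    t_demo = np.array([0.1, 0.0, 0.2])
    T_demo = combined_RT(R_demo, t_demo)       # 4x4 homogeneous transform
    p_demo = np.array([1.0, 2.0, 3.0, 1.0])    # homogeneous point
    assert np.allclose(np.linalg.inv(T_demo).dot(T_demo.dot(p_demo)), p_demo)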
import sys
import copy
import time
from math import pi

import rospy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
import tf_conversions as tf
import franka_interface
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
from panda_robot import PandaArm

moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('move_group_python_interface_tutorial', anonymous=True)
scene = moveit_commander.PlanningSceneInterface()
# rospy.init_node("panda_demo")

r = PandaArm()
while not rospy.is_shutdown():
    time.sleep(0.1)
    print(r.ee_pose())
    def inertia(self, joint_values=None):
        inertia = PyKDL.JntSpaceInertiaMatrix(self._num_jnts)
        self._dyn_kdl.JntToMass(self.joints_to_kdl('positions', joint_values),
                                inertia)
        return kdl_to_mat(inertia)

    def cart_inertia(self, joint_values=None):
        js_inertia = self.inertia(joint_values)
        jacobian = self.jacobian(joint_values)
        return np.linalg.inv(jacobian * np.linalg.inv(js_inertia) * jacobian.T)


if __name__ == '__main__':
    rospy.init_node('test')
    from panda_robot import PandaArm
    r = PandaArm()
    # print(r.has_gripper)
    # kin = PandaKinematics(r)
    kin = r._kinematics
    rate = rospy.Rate(10)
    while not rospy.is_shutdown():
        # print(kin.forward_position_kinematics())
        # argument = r.joint_angles()
        # combine the names and joint angles into the dictionary form that KDL accepts
        # jacobian = np.array(kin.jacobian(argument))
        print("-----------------")
        print("")
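        # --- Added usage sketch (not in the original fragment) ---
        # cart_inertia computes the task-space inertia
        #   Lambda(q) = (J M(q)^-1 J^T)^-1
        # from the joint-space mass matrix M and the Jacobian J. This assumes
        # `kin` is an instance of the kinematics class defined above.
        Lambda = kin.cart_inertia()  # 6x6 Cartesian inertia at the current configuration
        print(Lambda)
        rate.sleep()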
# Tail of the plotting routine; its enclosing function (where t, F_TOT, K_p
# and D_p are defined) is not shown in this fragment.
t -= t[0]
plt.plot(t, F_TOT[:, 1], label='$F_x$')
plt.plot(t, F_TOT[:, 2], label='$F_y$')
plt.plot(t, F_TOT[:, 3], label='$F_z$')
plt.legend()
plt.title('Reaction Force Plot with $K={}$ and $D = {}$'.format(K_p, D_p))
plt.xlabel('Time (s)')
plt.ylabel('Force (N)')
plt.savefig('K{}D{}'.format(K_p, D_p))
plt.show()

print('Exporting csv')
np.savetxt("Cartesian_Reactive_Force_K{}D{}.csv".format(K_p, D_p),
           F_TOT, delimiter=",")
print('Export Done')


if __name__ == "__main__":
    rospy.init_node('panda_imped_control')
    panda = PandaArm()

    # Move Panda to the neutral position
    panda.move_to_neutral()

    # Get the initial state of the robot.
    # set_equilibrium, _on_shutdown and impedance_control are defined
    # elsewhere in the original file.
    p_i, q_i = set_equilibrium(panda)

    rospy.on_shutdown(_on_shutdown)
    publish_rate = 100
    rate = rospy.Rate(publish_rate)
    impedance_control(rate)
    NOTE: This code will make the robot go to neutral (with MoveIt or the JT
    action client). After hitting Enter, the robot will slowly move (a small,
    slow wavy motion, similar to the franka_ros position controller demo)
    using position control or impedance control.

    NOTE: This is very simplified code. Ideally, the control commands should
    be sent from an independently running controller loop for smooth motion.

    Run the script, then press Enter when prompted.
"""

if __name__ == '__main__':
    rospy.init_node("ctrl_test")
    r = PandaArm()
    rate = rospy.Rate(100)
    elapsed_time_ = rospy.Duration(0.0)
    period = rospy.Duration(0.005)

    r.move_to_neutral()  # move to neutral pose before beginning
    initial_pose = deepcopy(r.angles())

    input("Hit Enter to Start")
    print("commanding")
    vals = deepcopy(initial_pose)
    count = 0
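    # --- Completion sketch (added; not in the original fragment) ---
    # The docstring above promises a small wavy motion like the franka_ros
    # position controller demo; this loop reproduces that pattern. The cosine
    # profile and the sign flip on joint 5 are assumptions modelled on that
    # demo. Assumes `import math` at the top of the file.
    while not rospy.is_shutdown():
        elapsed_time_ += period
        delta = 3.14 / 16.0 * (1 - math.cos(3.14 / 5.0 * elapsed_time_.to_sec())) * 0.2
        for j in range(len(vals)):
            if j == 4:
                vals[j] = initial_pose[j] - delta
            else:
                vals[j] = initial_pose[j] + delta
        r.exec_position_cmd(vals)  # position control command
        rate.sleep()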
import rospy
from panda_robot import PandaArm

rospy.init_node("panda_demo")

r = PandaArm()
r.move_to_neutral()
r.get_gripper().home_joints()
r.get_gripper().open()
# r.move_to_joint_position([-8.48556818e-02, -8.88127666e-02, -6.59622769e-01, -1.57569726e+00, -4.82374882e-04, 2.15975946e+00, 4.36766917e-01])
import rospy
from panda_robot import PandaArm

rospy.init_node('panda_demo')

r = PandaArm()
r.move_to_neutral()
r.move_to_joint_position([
    -8.48556818e-02, -8.88127666e-02, -6.59622769e-01, -1.57569726e+00,
    -4.82374882e-04, 2.15975946e+00, 4.36766917e-01
])  # move robot to the specified pose
import copy
import time

import cv2
import numpy as np
import rospy
import pyrealsense2 as rs
from panda_robot import PandaArm

# GPS framework imports; the paths follow the standard gps package layout,
# and the location of AGENT_PANDA is assumed.
from gps.agent.agent import Agent
from gps.agent.agent_utils import generate_noise, setup
from gps.agent.config import AGENT_PANDA
from gps.proto.gps_pb2 import (ACTION, NOISE, JOINT_ANGLES, JOINT_VELOCITIES,
                               END_EFFECTOR_POINTS,
                               END_EFFECTOR_POINT_VELOCITIES,
                               END_EFFECTOR_POINT_JACOBIANS,
                               END_EFFECTOR_POINTS_NO_TARGET,
                               END_EFFECTOR_POINT_VELOCITIES_NO_TARGET,
                               RGB_IMAGE, RGB_IMAGE_SIZE, CONTEXT_IMAGE,
                               IMAGE_FEAT)
from gps.sample.sample import Sample


class AgentPanda(Agent):
    """
    All communication between the algorithms and the Panda robot is done
    through this class.
    """

    def __init__(self, hyperparams):
        config = copy.deepcopy(AGENT_PANDA)
        config.update(hyperparams)
        Agent.__init__(self, config)
        rospy.init_node('gps_agent_panda')
        self.period = self._hyperparams['dt']
        self.r = rospy.Rate(1. / self.period)
        self._setup_conditions()
        # self._setup_world(hyperparams['filename'])
        self._set_initial_state()

        ## TODO: decide whether to use 'use_camera' or not.
        self.panda = PandaArm()
        # if RGB_IMAGE in self.obs_data_types:
        #     self.panda.use_camera = True
        self.pipeline = rs.pipeline()
        self.config = rs.config()
        self.config.enable_stream(rs.stream.color,
                                  self._hyperparams['image_width'],
                                  self._hyperparams['image_height'],
                                  rs.format.bgr8, 30)
        self.pipeline.start(self.config)

    def _setup_conditions(self):
        """
        Helper method for setting some hyperparameters that may vary by
        condition.
        """
        conds = self._hyperparams['conditions']
        '''
        for field in ('x0', 'x0var', 'pos_body_idx', 'pos_body_offset',
                      'noisy_body_idx', 'noisy_body_var', 'filename'):
        '''
        for field in ('x0', 'x0var', 'pos_body_idx', 'pos_body_offset',
                      'noisy_body_idx', 'noisy_body_var'):
            self._hyperparams[field] = setup(self._hyperparams[field], conds)

    def _set_initial_state(self):
        self.x0 = []
        for i in range(self._hyperparams['conditions']):
            if END_EFFECTOR_POINTS in self.x_data_types:
                '''
                eepts = np.array(self.baxter.get_baxter_end_effector_pose()).flatten()
                self.x0.append(
                    np.concatenate([self._hyperparams['x0'][i], eepts,
                                    np.zeros_like(eepts)]))
                '''
                self.x0.append(self._hyperparams['x0'][i])
            else:
                self.x0.append(self._hyperparams['x0'][i])
            if IMAGE_FEAT in self.x_data_types:
                self.x0[i] = np.concatenate([
                    self.x0[i],
                    np.zeros((self._hyperparams['sensor_dims'][IMAGE_FEAT],))
                ])

    ## NOT CALLED <-- replaced by self.panda._setup_panda_world()

    def sample(self, policy, condition, iteration, verbose=True, save=True,
               noisy=True):
        """
        Run a trial and construct a new sample containing information about
        the trial.
        Args:
            policy: Policy to be used in the trial.
            condition: Which condition setup to run.
            verbose: Whether or not to plot the trial.
            save: Whether or not to store the trial into the samples.
            noisy: Whether or not to use noise during sampling.
        """
        # Create new sample, populate first time step.
        feature_fn = None
        if 'get_features' in dir(policy):
            feature_fn = policy.get_features
        # noisy = False
        ## TODO: where should the line below be located?
        new_sample = self._init_sample(condition, feature_fn=feature_fn)
        mj_X = self._hyperparams['x0'][condition]
        U = np.zeros([self.T, self.dU])
        U_origin = np.zeros([self.T, self.dU])
        if noisy:
            noise = generate_noise(self.T, self.dU, self._hyperparams)
        else:
            noise = np.zeros((self.T, self.dU))
        if np.any(self._hyperparams['x0var'][condition] > 0):
            # perturb the initial state with condition-specific noise
            x0var = np.asarray(self._hyperparams['x0var'][condition])
            x0n = x0var * np.random.randn(*x0var.shape)
            mj_X += x0n
        noisy_body_idx = self._hyperparams['noisy_body_idx'][condition]
        if noisy_body_idx.size > 0:
            for i in range(len(noisy_body_idx)):
                idx = noisy_body_idx[i]
                var = self._hyperparams['noisy_body_var'][condition][i]
                # NOTE: self._model comes from the disabled MuJoCo world
                # setup; this branch assumes it was initialised elsewhere.
                self._model[condition]['body_pos'][idx, :] += \
                        var * np.random.randn(1, 3)

        torq_max1 = 45  # original limit: 87
        torq_max2 = 6   # original limit: 12
        # self.panda.set_force_threshold_for_collision([20, 20, 10, 25, 25, 25])  # X, Y, Z, R, P, Y
        self.panda.set_collision_threshold(
            cartesian_forces=[20, 20, 10, 25, 25, 25])  # X, Y, Z, R, P, Y

        while True:
            # panda: move to joint position
            ## TODO: where should the line below be located?
            # new_sample = self._init_sample(condition, feature_fn=feature_fn)  # new_sample: class 'Sample'
            try:
                # self.panda.enable_robot()
                if not self.panda.is_enabled_robot():
                    raise StopIteration
                self.panda.move_to_joint_position(
                    self._hyperparams['x0'][condition][0:7])
                time.sleep(2)
                # Take the sample.
                for t in range(self.T):
                    X_t = new_sample.get_X(t=t)
                    obs_t = new_sample.get_obs(t=t)
                    mj_U = policy.act(X_t, obs_t, t, noise[t, :])
                    mj_U_origin = mj_U.copy()
                    # clip torques to the (reduced) per-joint limits
                    for i in range(len(mj_U)):
                        if i < 4 and mj_U[i] > torq_max1:
                            mj_U[i] = torq_max1
                        elif i < 4 and mj_U[i] < -torq_max1:
                            mj_U[i] = -torq_max1
                        elif i >= 4 and mj_U[i] > torq_max2:
                            mj_U[i] = torq_max2
                        elif i >= 4 and mj_U[i] < -torq_max2:
                            mj_U[i] = -torq_max2
                    U[t, :] = mj_U
                    U_origin[t, :] = mj_U_origin
                    # print('mj_U: ', mj_U)
                    # print('mj_U dict: ', self.list_to_dict(mj_U))
                    if (t + 1) < self.T:
                        # self.panda.enable_robot()
                        # for _ in range(self._hyperparams['substeps']):
                        if self.panda.has_collided():
                            raise StopIteration
                        if not self.panda.is_enabled_robot():
                            raise StopIteration
                        # move the panda with mj_U
                        # self.panda.set_joint_velocities(self.list_to_dict(mj_U))
                        # self.panda.exec_velocity_cmd(mj_U)
                        self.panda.exec_torque_cmd(mj_U)
                        # self.panda.exec_position_cmd(mj_U)
                        print("current step (t): ", t)
                        self._set_sample(new_sample, mj_X, t, condition,
                                         feature_fn=feature_fn)
                        self.r.sleep()  # to sample data at a fixed frequency
                # torque commands allowing the robot to finish the trajectory
                for i in range(15):
                    self.panda.exec_torque_cmd([0, 0, 0, 0, 0, 0, 0])
                time.sleep(1)
                break
            except StopIteration:
                print("robot stopped!!!")
                self.panda.enable_robot()
                time.sleep(2)
                continue
            finally:
                f = '/home/panda_gps/gps/experiments/panda_test_dongju/action_origin_' + str(iteration) + '.npy'
                np.save(f, U_origin)
                f = '/home/panda_gps/gps/experiments/panda_test_dongju/action_clipped_' + str(iteration) + '.npy'
                np.save(f, U)

        new_sample.set(ACTION, U)
        new_sample.set(NOISE, noise)
        if save:
            self._samples[condition].append(new_sample)
        return new_sample

    def _init_sample(self, condition, feature_fn=None):
        """
        Construct a new sample and fill in the first time step.
        Args:
            condition: Which condition to initialize.
            feature_fn: function to compute image features from the observation.
        """
        sample = Sample(self)
        self.panda.move_to_joint_position(
            self._hyperparams['x0'][condition][0:7])

        # Initialize sample with stuff from _data
        # panda joint positions
        self.prev_positions = self.panda.angles()
        ## TODO: replace the line below with a panda function
        sample.set(JOINT_ANGLES, self.prev_positions, t=0)
        # panda joint velocities
        sample.set(JOINT_VELOCITIES, self.panda.velocities(), t=0)
        # panda end-effector pose
        ee_point, ee_ori = self.panda.ee_pose()
        sample.set(END_EFFECTOR_POINTS, ee_point, t=0)
        # panda end-effector velocity
        ee_vel, ee_omg = self.panda.ee_velocity()
        sample.set(END_EFFECTOR_POINT_VELOCITIES,
                   np.array(list(ee_vel) + list(ee_omg)), t=0)
        # panda jacobian
        sample.set(END_EFFECTOR_POINT_JACOBIANS, self.panda.jacobian(), t=0)
        ## TODO: check whether the block below is necessary
        if (END_EFFECTOR_POINTS_NO_TARGET in self._hyperparams['obs_include']):
            # NOTE: `eepts` comes from the disabled end-effector setup above.
            sample.set(END_EFFECTOR_POINTS_NO_TARGET,
                       np.delete(eepts, self._hyperparams['target_idx']), t=0)
            sample.set(END_EFFECTOR_POINT_VELOCITIES_NO_TARGET,
                       np.delete(np.zeros_like(eepts),
                                 self._hyperparams['target_idx']), t=0)

        ## TODO: enable this again after the camera is installed
        # only save subsequent images if image is part of observation
        if RGB_IMAGE in self.obs_data_types:
            ## TODO: replace the line below with another function
            # ex 1:
            # self.img = self.baxter.get_baxter_camera_image()
            # sample.set(RGB_IMAGE, np.transpose(self.img, (2, 1, 0)).flatten(), t=0)
            # ex 2:
            # sample.set(RGB_IMAGE, img_data, t=0)
            sample.set(RGB_IMAGE_SIZE, [
                self._hyperparams['image_channels'],
                self._hyperparams['image_width'],
                self._hyperparams['image_height']
            ], t=None)
            if IMAGE_FEAT in self.obs_data_types:
                raise ValueError(
                    'Image features should not be in observation, just state')
            if feature_fn is not None:
                obs = sample.get_obs()  # assumes the rest of the sample has been populated
                sample.set(IMAGE_FEAT, feature_fn(obs), t=0)
            else:
                sample.set(
                    IMAGE_FEAT,
                    np.zeros((self._hyperparams['sensor_dims'][IMAGE_FEAT],)),
                    t=0)
        return sample

    def _set_sample(self, sample, mj_X, t, condition, feature_fn=None):
        """
        Set the data for a sample for one time step.
        Args:
            sample: Sample object to set data for.
            mj_X: Data to set for sample.
            t: Time step to set for sample.
            condition: Which condition to set.
            feature_fn: function to compute image features from the observation.
        """
        curr_positions = self.panda.angles()
        sample.set(JOINT_ANGLES, curr_positions, t=t + 1)
        sample.set(JOINT_VELOCITIES, self.panda.velocities(), t=t + 1)
        ee_point, ee_ori = self.panda.ee_pose()
        sample.set(END_EFFECTOR_POINTS, list(ee_point), t=t + 1)
        ee_vel, ee_omg = self.panda.ee_velocity()
        sample.set(END_EFFECTOR_POINT_VELOCITIES,
                   list(ee_vel) + list(ee_omg), t=t + 1)
        sample.set(END_EFFECTOR_POINT_JACOBIANS, self.panda.jacobian(),
                   t=t + 1)

        time_out = True
        """
        s0 = time.time()
        while (np.all(self.prev_positions == curr_positions)):
            s1 = time.time()
            if s1 - s0 >= 0.1:
                break
        """
        self.prev_positions = curr_positions
        print('Joint Positions: ' + repr(self.prev_positions) + '\n')

        ## TODO: enable this again after the camera is installed
        if RGB_IMAGE in self.obs_data_types:
            ## TODO: replace the block below with a panda function
            frames = self.pipeline.wait_for_frames()
            color_frame = frames.get_color_frame()
            self.img = np.asanyarray(color_frame.get_data())
            cv2.imshow('realsense', self.img)
            cv2.waitKey(1)
            sample.set(RGB_IMAGE,
                       np.transpose(self.img, (2, 1, 0)).flatten(), t=t + 1)
            ## TODO: check whether the line below is necessary
            sample.set(RGB_IMAGE_SIZE, [
                self._hyperparams['image_channels'],
                self._hyperparams['image_width'],
                self._hyperparams['image_height']
            ], t=None)
            if feature_fn is not None:
                obs = sample.get_obs()  # assumes the rest of the observation has been populated
                sample.set(IMAGE_FEAT, feature_fn(obs), t=t + 1)
            else:
                sample.set(
                    IMAGE_FEAT,
                    np.zeros((self._hyperparams['sensor_dims'][IMAGE_FEAT],)),
                    t=t + 1)

    def _get_image_from_obs(self, obs):
        imstart = 0
        imend = 0
        image_channels = self._hyperparams['image_channels']
        image_width = self._hyperparams['image_width']
        image_height = self._hyperparams['image_height']
        for sensor in self._hyperparams['obs_include']:
            # assumes only one of RGB_IMAGE or CONTEXT_IMAGE is present
            if sensor == RGB_IMAGE or sensor == CONTEXT_IMAGE:
                imend = imstart + self._hyperparams['sensor_dims'][sensor]
                break
            else:
                imstart += self._hyperparams['sensor_dims'][sensor]
        img = obs[imstart:imend]
        img = img.reshape((image_width, image_height, image_channels))
        ## TODO: check whether the cast below is necessary
        img = img.astype(np.uint8)
        return img

    def list_to_dict(self, joint_list):
        joint_name_list = self.panda.joint_names()
        joint_dict = {}
        for i in range(len(joint_list)):
            joint_dict[joint_name_list[i]] = joint_list[i]
        return joint_dict
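
# --- Added sketch (not part of the original file) ---
# The per-element torque clipping in sample() can be expressed as a single
# vectorized np.clip call; torq_max1/torq_max2 mirror the reduced limits used
# above. Illustrative alternative only.
def clip_torques(mj_U, torq_max1=45.0, torq_max2=6.0):
    """Clip the first 4 joint torques to +/-torq_max1 and the rest to +/-torq_max2."""
    limits = np.array([torq_max1] * 4 + [torq_max2] * (len(mj_U) - 4))
    return np.clip(mj_U, -limits, limits)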
    msg.orientation.z = quat.z
    msg.orientation.w = quat.w
    return msg


def new_ori(quat1, quat2):
    # compose two orientations by matrix-multiplying their rotation matrices
    return quaternion.from_rotation_matrix(
        quaternion.as_rotation_matrix(quat1).dot(
            quaternion.as_rotation_matrix(quat2)))


if __name__ == '__main__':
    rospy.init_node("panda_env")
    pub = rospy.Publisher("/pose_test", PoseStamped, queue_size=10)

    r = PandaArm(reset_frames=True)  # handle to use methods from the PandaArm class

    fi = r.get_frames_interface()  # frames interface object for the robot. Test switching EE frames.
    # How to test:
    # 1) open rviz -> add RobotModel (topic 'robot_description')
    # 2) set panda_link0 as the global fixed frame
    # 3) add tf -> disable visualisation of all links except panda_EE
    # 4) run this script in a terminal in interactive mode
    # 5) type $ fi.set_EE_frame_to_link('panda_hand') to move the EE frame to
    #    that link. Try different link names.
    # Test the same for the stiffness frame (set_K_frame_to_link).

    cm = r.get_controller_manager()  # controller manager object to get controller states and switch controllers
    kin = r._kinematics  # to test the kinematics (not required; kinematics can be queried directly via PandaArm methods)
    g = r.get_gripper()  # gripper object. Test using $ g.close(), $ g.open(), $ g.home_joints(), $ g.move_joints(0.01), etc.
import sys
import copy
import time
from math import pi

import rospy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
import franka_interface
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
from panda_robot import PandaArm
from utils.object_attach_control import object_server

moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('move_group_python_interface_tutorial', anonymous=True)
scene = moveit_commander.PlanningSceneInterface()

robot_arm = PandaArm()
robot_gripper = franka_interface.GripperInterface()
obj_server = object_server()

if __name__ == '__main__':
    for i in range(10):
        obj_server.attach_object("kinect_ros_0", 0.05, -0.05, 0.0)

    # init robot arm state
    robot_arm.move_to_neutral()
    robot_arm.get_gripper().home_joints()
    robot_arm.get_gripper().open()
import rospy
import numpy as np
from copy import deepcopy
from panda_robot import PandaArm

# test file; used for tuning impedance controller gains

vals = []
vels = []
names = ['panda_joint1', 'panda_joint2', 'panda_joint3', 'panda_joint4',
         'panda_joint5', 'panda_joint6', 'panda_joint7']

if __name__ == '__main__':
    rospy.init_node("test_node", disable_signals=True)
    r = PandaArm()
    rate = rospy.Rate(100)

    initial_pose = deepcopy(r.joint_ordered_angles())
    vals = deepcopy(initial_pose)
    i = 0.1

    r.move_to_neutral()
    input("Hit Enter to Start")

    joints = r.angles()
    limits = r.joint_limits()
    Test by pushing the robot. It should feel 'springy'.

    Warning: The default torque and force safety limits of the robot are
    raised for this test; otherwise pushing a little too hard will trigger a
    safety stop from libfranka.
"""

def _clean_shutdown():
    # reset collision thresholds to libfranka defaults when the demo ends
    r.set_collision_threshold()


if __name__ == '__main__':
    rospy.init_node("controller_env")
    r = PandaArm()

    force_threshold = [100, 100, 100, 100, 100, 100]        # cartesian force threshold
    torque_threshold = [100, 100, 100, 100, 100, 100, 100]  # joint torque threshold

    # increase collision detection thresholds for testing
    r.set_collision_threshold(joint_torques=torque_threshold,
                              cartesian_forces=force_threshold)
    rospy.on_shutdown(_clean_shutdown)

    initial_pose = r.joint_ordered_angles()
    rate = rospy.Rate(100)

    while not rospy.is_shutdown():
        # robot should be holding the same position in joint impedance mode
        vels = r.joint_velocities()
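        # --- Completion sketch (added; not in the original fragment) ---
        # Mirrors the joint-impedance demo elsewhere in this repo: re-command
        # the held pose together with the measured velocities so the impedance
        # controller springs back against pushes.
        r.set_joint_positions_velocities(
            initial_pose, [vels[j] for j in r.joint_names()])
        rate.sleep()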
# assumed step size for the end-effector position keys; the value is not
# shown in this fragment
pos_increment = 0.01


def map_keyboard():
    """
    Map keyboard keys to robot joint motion. Keybindings can be found when
    running the script.
    """
    limb = PandaArm()
    has_gripper = limb.get_gripper() is not None
    # joints = limb.joint_names()

    def set_ee_target(action, value):
        pos, ori = limb.ee_pose()
        if action == 'position':
            pos += value
        status, j_des = limb.inverse_kinematics(pos, ori)
        # print(j_des)
        if status:
            limb.exec_position_cmd(j_des)

    def set_g(action):
        if has_gripper:
            if action == "close":
                limb.get_gripper().close()
            elif action == "open":
                limb.get_gripper().open()
            elif action == "calibrate":
                limb.get_gripper().calibrate()

    def reset_robot(args):
        limb.untuck()

    bindings = {
        '5': (set_ee_target, ['position', np.asarray([pos_increment, 0, 0])], "x increase"),
        '2': (set_ee_target, ['position', np.asarray([-pos_increment, 0, 0])], "x decrease"),
        '1': (set_ee_target, ['position', np.asarray([0, pos_increment, 0])], "y increase"),
        '3': (set_ee_target, ['position', np.asarray([0, -pos_increment, 0])], "y decrease"),
        '7': (set_ee_target, ['position', np.asarray([0, 0, pos_increment])], "z increase"),
        '4': (set_ee_target, ['position', np.asarray([0, 0, -pos_increment])], "z decrease"),
        'r': (reset_robot, [None], "reset to neutral pose"),
    }
    if has_gripper:
        bindings.update({
            '8': (set_g, "close", "close gripper"),
            '9': (set_g, "open", "open gripper"),
            'i': (set_g, "calibrate", "calibrate gripper"),
        })

    done = False
    print("Controlling joints. Press ? for help, Esc to quit.")
    while not done and not rospy.is_shutdown():
        # getch is assumed to be imported/defined elsewhere in the original file
        c = getch()
        if c:
            # catch Esc or ctrl-c
            if c in ['\x1b', '\x03']:
                done = True
                rospy.signal_shutdown("Example finished.")
            elif c in bindings:
                cmd = bindings[c]
                if c == '8' or c == 'i' or c == '9':
                    cmd[0](cmd[1])
                    print("command: %s" % (cmd[2],))
                else:
                    # expand binding to something like "set_ee_target('position', delta)"
                    cmd[0](*cmd[1])
                    print("command: %s" % (cmd[2],))
            else:
                print("key bindings: ")
                print("  Esc: Quit")
                print("  ?: Help")
                for key, val in sorted(bindings.items(), key=lambda x: x[1][2]):
                    print("  %s: %s" % (key, val[2]))
import rospy
from panda_robot import PandaArm

rospy.init_node("controller_env")

r = PandaArm()
fi = r.get_frames_interface()
cm = r.get_controller_manager()

rate = rospy.Rate(100)  # 100 Hz, matching the other impedance demos in this repo
while not rospy.is_shutdown():
    # robot should be holding the same position in joint impedance mode
    vels = r.joint_velocities()
    # impedance control command (see the package documentation)
    r.set_joint_positions_velocities(
        r.joint_ordered_angles(),
        [vels[j] for j in r.joint_names()])
    rate.sleep()
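
# --- Added note (not in the original script) ---
# No manual controller switching through `cm` is needed here: the interface
# automatically selects the controller matching the command type, e.g.
#   r.exec_torque_cmd([0.0] * 7)   # switches to torque control
#   r.exec_position_cmd(q_target)  # switches to position control
# (q_target is an illustrative 7-element joint target, not from this script.)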
import rospy
from panda_robot import PandaArm

poses = [[-8.48556818e-02, -8.88127666e-02, -6.59622769e-01, -1.57569726e+00,
          -4.82374882e-04, 2.15975946e+00, 4.36766917e-01],
         [1.34695728e-01, -2.74474940e-01, -2.46027836e-01, -1.19805447e+00,
          -5.27289847e-05, 2.17926193e+00, 9.10497957e-01],
         [1.81297444e-01, 3.94348774e-01, -2.25835923e-01, -1.19416311e+00,
          -7.51349249e-04, 2.79453565e+00, 8.36526167e-01],
         [0.63068724, 0.86207321, -0.52113169, -0.95186331,
          0.02450696, 2.64150352, 0.5074312]]

if __name__ == '__main__':
    rospy.init_node("panda_env")

    r = PandaArm(reset_frames=False)  # handle to use methods from the PandaArm class

    fi = r.get_frames_interface()  # frames interface object for the robot. Test switching EE frames.
    # How to test:
    # 1) open rviz -> add RobotModel (topic 'robot_description')
    # 2) set panda_link0 as the global fixed frame
    # 3) add tf -> disable visualisation of all links except panda_EE
    # 4) run this script in a terminal in interactive mode
    # 5) type $ fi.set_EE_frame_to_link('panda_hand') to move the EE frame to
    #    that link. Try different link names.
    # Test the same for the stiffness frame (set_K_frame_to_link).

    # Controller manager object to get controller states and switch
    # controllers. Controllers don't have to be switched manually in most
    # cases: the interface automatically chooses the right one for the
    # command sent (r.exec_position_cmd, r.exec_velocity_cmd,
    # r.exec_torque_cmd, r.set_joint_position_velocity).
    cm = r.get_controller_manager()

    kin = r._kinematics()  # to test the kinematics (not required; kinematics can be queried directly via PandaArm methods)

    g = r.get_gripper()  # gripper object. Test using $ g.close(), $ g.open(), $ g.home_joints(), $ g.move_joints(0.01), etc.
import rospy
import numpy as np
from panda_robot import PandaArm
# from franka_tools import FrankaFramesInterface
# import tf
# import quaternion

if __name__ == '__main__':
    rospy.init_node('test')
    r = PandaArm(reset_frames=False)

    angles = r.joint_angles()
    names = r.joint_names()

    vals = [0.000, -0.785, 0.0, -1.9, 0.0, 1.57, 0.785]

    def convert_to_dict(val):
        retval = {}
        for n, name in enumerate(names):
            retval[name] = val[n]
        return retval

    def sendto(values):
        r.move_to_joint_positions(convert_to_dict(values))
        print("err", np.asarray(values) - np.asarray(r.joint_ordered_angles()))

    cm = r.get_controller_manager()
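    # --- Added usage sketch (not in the original script) ---
    # The script defines sendto() but never calls it; calling it drives the
    # arm to the ready pose above and prints the residual joint error.
    sendto(vals)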