    def get_rgb_image(self):
        # Grab an RGB frame from the simulated Kinect colour sensor.
        sim_ret, resolution, raw_image = vrep.simxGetVisionSensorImage(
            self.clientID, self.KinectRgbHandle, 0, vrep.simx_opmode_blocking)
        color_img = np.asarray(raw_image)
        color_img.shape = (resolution[1], resolution[0], 3)
        # V-REP returns signed bytes; map negative values back into [0, 255].
        color_img = color_img.astype(float) / 255
        color_img[color_img < 0] += 1
        color_img *= 255
        color_img = np.fliplr(color_img)
        color_img = color_img.astype(np.uint8)
        # Rotate by 180 degrees so the image matches the expected camera orientation.
        center = (resolution[0] / 2, resolution[1] / 2)
        size = (resolution[0], resolution[1])
        rotateMat = cv2.getRotationMatrix2D(center, 180, 1)
        result_img = cv2.warpAffine(color_img, rotateMat, size)
        return result_img

    def get_depth_image(self):
        # Grab the depth buffer from the simulated Kinect depth sensor.
        sim_ret, resolution, depth_buffer = vrep.simxGetVisionSensorDepthBuffer(
            self.clientID, self.KinectDepthHandle, vrep.simx_opmode_blocking)
        depth_img = np.asarray(depth_buffer)
        depth_img.shape = (resolution[1], resolution[0])
        depth_img = np.fliplr(depth_img)
        return depth_img


if __name__ == '__main__':
    camera = Camera(vrep_connect.getVrep_connect())
    rgb = camera.get_rgb_image()
    cv2.imshow("rgb", rgb)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
        os.path.join(save_dir, "{0}_{1}.json".format(methodstr, time_str)))


if __name__ == '__main__':
    method = {
        "no_Local": 0,
        "std": 1,
        "no_local_std": 3,
        # "rme": 2,
        # "no_lock_rme": 4,
        "random": 5,
        "ias": 6
    }
    board = apriltagboard.AprilTagBoard("../config/apriltag.yml", "../config/tagId.csv")
    client = vrep_connect.getVrep_connect()
    camera = Kinect_test.Camera(client, "../config/intrinsic_gt.yml")
    robot = LBR4p.Robot(client)
    move_lock = lock(robot, camera)
    threads = []
    # Load the initial hand-to-eye robot pose (quaternion + translation) from YAML.
    fs = cv2.FileStorage("../config/init_handtoeye_robot_pose.yml", cv2.FileStorage_READ)
    init_pose = fs.getNode("initpose").mat()
    q = init_pose[:4].flatten()
    pose_r = transforms3d.quaternions.quat2mat(q)
    # Assemble the 4x4 homogeneous pose from the quaternion and translation entries.
    pose = np.identity(4)
    pose[:3, :3] = pose_r[:, :]
    pose[0, 3] = init_pose[4]
# _*_ coding:utf-8 _*_
# @time: 2020/10/8 7:50 PM
# @author: 张新新
# @email: [email protected]
from Vrep import LBR4p
from Vrep import vrep_connect

client_id = vrep_connect.getVrep_connect()
robot = LBR4p.Robot(client_id)
pose = robot.getPoseMatrix()
pose[2, 3] = 0.725
print(robot.move(pose))
z_max = pose[2, 3]
# Bisection search for the lowest reachable z, kept for reference:
# z_min = 0
# while True:
#     z_mid = (z_max + z_min) / 2
#     pose[2, 3] = z_mid
#     flag = robot.move(pose)
#     if flag:
#         z_max = z_mid
#     else:
#         z_min = z_mid
#     if z_max - z_mid < 0.01:
#         print(z_max, z_min)
# print(z_max, z_min)
        os.path.join(save_dir, "{0}_{1}.json".format(methodstr, time_str)))


if __name__ == '__main__':
    method = {
        "no_Local": 0,
        "std": 1,
        "no_local_std": 3,
        # "rme": 2,
        # "no_lock_rme": 4,
        "random": 5,
        "ias": 6
    }
    board = apriltagboard.AprilTagBoard("../config/apriltag.yml", "../config/tagId.csv")
    client = vrep_connect.getVrep_connect(19998)
    camera = Kinect_test.Camera(client, "../config/intrinsic_gt.yml")
    robot = LBR4p.Robot(client)
    move_lock = lock(robot, camera)
    threads = []
    # init_robot_pose_list = utils.json_load("../config/init_robot_pose.json")
    for i in range(100):
        print("start to calculate data for iteration {}".format(i))
        start = time.time()  # the timed code block goes here
        auto_handeye = auto_handeye_calibration_thread.auto_handeye_calibration(
            board, robot, camera, "../config/auto_set_to.yml", move_lock)
        # auto_handeye.set_init_robot_pose(random.choice(init_robot_pose_list))
        auto_handeye.init_handeye()
        z = t[2]
        # Target is too far from the base to reach.
        if distance > 0.82:
            return False
        # Target lies within 0.42 m of the point (0, 0, 0.36), i.e. too close to the robot.
        if x**2 + y**2 + (z - 0.36)**2 < 0.42**2:
            return False
        return True

    def set_simu(self, simu):
        self.simu_q = simu
        self.simu_t = simu

    def get_simu(self):
        return self.simu_q


if __name__ == '__main__':
    robot = Robot(vrep_connect.getVrep_connect())
    pose = np.zeros([4, 4])
    pose_r = transforms3d.euler.euler2mat(math.pi / 6, math.pi / 4, math.pi / 3, 'rxyz')
    q = transforms3d.euler.euler2quat(math.pi / 6, math.pi / 4, math.pi / 3, 'rxyz')
    print(q)
    euler = np.array([math.pi / 6, math.pi / 4, math.pi / 3])
    pose_t = np.array([0.3, 0.3, 0.3])
    pose[:3, :3] = pose_r[:, :]
    pose[:3, 3] = pose_t[:]
    pose[3, 3] = 1  # complete the homogeneous transform
    robot.move(pose)