Example #1
def estimateNormals(self, points):
    pcd = PointCloud()
    pcd.points = Vector3dVector(points)
    estimate_normals(pcd,
                     search_param=KDTreeSearchParamHybrid(radius=0.1,
                                                          max_nn=20))
    return np.asarray(pcd.normals)
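Throughout these examples the Open3D bindings appear under several aliases (`open3d`, `o3d`, `o3`, `od`, `op3`, `pn`). This first snippet additionally assumes the legacy flat namespace, i.e. something like `from open3d import PointCloud, Vector3dVector, estimate_normals, KDTreeSearchParamHybrid` together with `import numpy as np`.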
Example #2
def save_to_ply(points, filename, colors=None):
  pcd = o3d.geometry.PointCloud()
  pcd.points = o3d.utility.Vector3dVector(points)
  o3d.estimate_normals(pcd)
  if colors is not None:
    pcd.colors = o3d.utility.Vector3dVector(colors)
  o3d.io.write_point_cloud(filename, pcd)
Example #3
def NormalEstimate(points):
    """ Normal estimation """

    # Convert the point cloud from a NumPy array to Open3D format
    pointcloud = open3d.PointCloud()
    pointcloud.points = open3d.Vector3dVector(points)

    # Estimate normals
    open3d.estimate_normals(pointcloud,
                            search_param=open3d.KDTreeSearchParamHybrid(
                                radius=5, max_nn=30))

    # Orient the normals toward the viewpoint
    open3d.orient_normals_towards_camera_location(pointcloud,
                                                  camera_location=np.array(
                                                      [0., 10., 10.],
                                                      dtype="float64"))

    # Press the n key to display normals
    #open3d.draw_geometries([pointcloud])

    # Convert the normals to NumPy
    normals = np.asarray(pointcloud.normals)

    return normals
Example #4
def ViewPLY(path):
    # Load the file
    pointcloud = open3d.read_point_cloud(path)

    # Downsampling
    # (rejected when it makes normal estimation impossible)
    pointcloud = open3d.voxel_down_sample(pointcloud, 10)

    # Estimate normals
    open3d.estimate_normals(pointcloud,
                            search_param=open3d.KDTreeSearchParamHybrid(
                                radius=20, max_nn=30))

    # Orient the normals toward the viewpoint
    open3d.orient_normals_towards_camera_location(pointcloud,
                                                  camera_location=np.array(
                                                      [0., 10., 10.],
                                                      dtype="float64"))

    # Visualize
    open3d.draw_geometries([pointcloud])

    # Convert to NumPy
    points = np.asarray(pointcloud.points)
    normals = np.asarray(pointcloud.normals)

    print("points:{}".format(points.shape[0]))

    X, Y, Z = Disassemble(points)

    # Generate OBB
    _, _, length = buildOBB(points)
    print("OBB_length: {}".format(length))

    return points, X, Y, Z, normals, length
Example #5
    def estimateTransform(self, source, target):
        source = od.read_point_cloud(source)
        target = od.read_point_cloud(target)
        current_transformation = self.base_transform

        for scale in range(len(self.voxel_radius)):
            iterations = self.max_iter[scale]
            radius = self.voxel_radius[scale]

            source_down = od.voxel_down_sample(source, radius)
            target_down = od.voxel_down_sample(target, radius)

            od.estimate_normals(
                source_down,
                od.KDTreeSearchParamHybrid(radius=2 * radius,
                                           max_nn=self.max_nn))
            od.estimate_normals(
                target_down,
                od.KDTreeSearchParamHybrid(radius=2 * radius,
                                           max_nn=self.max_nn))

            result_icp = od.registration_colored_icp(
                source_down, target_down, radius, current_transformation,
                od.ICPConvergenceCriteria(
                    relative_fitness=self.relative_fitness,
                    relative_rmse=self.relative_rmse,
                    max_iteration=iterations))

            current_transformation = result_icp.transformation
            # self.draw_registration_result_original_color(
            # source_down, target_down, current_transformation)

        return current_transformation
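This is the usual coarse-to-fine colored-ICP scheme: at each scale both clouds are voxel-downsampled at the current radius, normals are re-estimated with a search radius of twice that value, and each colored-ICP result seeds the next, finer scale through `current_transformation`.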
Example #6
def NormalEstimate(points):
    # Get the point cloud from an obj file
    # points = loadOBJ(path)
    print("points:{}".format(points.shape[0]))

    # Convert the point cloud from a NumPy array to Open3D format
    pointcloud = open3d.PointCloud()
    pointcloud.points = open3d.Vector3dVector(points)

    # Estimate normals
    open3d.estimate_normals(pointcloud,
                            search_param=open3d.KDTreeSearchParamHybrid(
                                radius=5, max_nn=30))

    # Orient the normals toward the viewpoint
    open3d.orient_normals_towards_camera_location(pointcloud,
                                                  camera_location=np.array(
                                                      [0., 10., 10.],
                                                      dtype="float64"))

    # Press the n key to display normals
    #open3d.draw_geometries([pointcloud])

    # Convert the normals to NumPy
    normals = np.asarray(pointcloud.normals)

    # Generate OBB
    # (also intended for use as an optimization constraint)
    # _, _, length = buildOBB(points)
    # print("OBB_length: {}".format(length))

    return normals
Example #7
def sample_point_cloud_feature(point_cloud: op3.PointCloud,
                               voxel_size: float,
                               verbose=False):
    """
    Down sample and sample the point cloud feature
    :param point_cloud: an object of Open3D
    :param voxel_size: a float value, that is how sparse you want the sample points is
    :param verbose: a boolean value, display notification and visualization when True and no notification when False
    :return: 2 objects of Open3D, that are down-sampled point cloud and point cloud feature fpfh
    """
    if verbose: print(":: Downsample with a voxel size %.3f." % voxel_size)
    point_cloud_down_sample = op3.voxel_down_sample(point_cloud, voxel_size)

    radius_normal = voxel_size * 2
    if verbose:
        print(":: Estimate normal with search radius %.3f." % radius_normal)
    op3.estimate_normals(
        point_cloud_down_sample,
        op3.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=30))

    radius_feature = voxel_size * 5
    if verbose:
        print(":: Compute FPFH feature with search radius %.3f." %
              radius_feature)
    point_cloud_fpfh = op3.compute_fpfh_feature(
        point_cloud_down_sample,
        op3.KDTreeSearchParamHybrid(radius=radius_feature, max_nn=100))
    return point_cloud_down_sample, point_cloud_fpfh
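For context, a minimal usage sketch of the helper above (the voxel size and the file name "scene.pcd" are illustrative; it assumes the same legacy pre-0.8 Open3D API the example itself uses):

import open3d as op3

pcd = op3.read_point_cloud("scene.pcd")  # placeholder path
pcd_down, pcd_fpfh = sample_point_cloud_feature(pcd, voxel_size=0.05, verbose=True)
# pcd_fpfh.data holds one 33-dimensional FPFH descriptor per downsampled point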
Example #8
def estimate_normals(pcd, params):
    if o3.__version__ >= '0.8.0.0':
        pcd.estimate_normals(search_param=params)
        pcd.orient_normals_to_align_with_direction()
    else:
        o3.estimate_normals(pcd, search_param=params)
        o3.orient_normals_to_align_with_direction(pcd)
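A minimal sketch of how this compatibility shim might be exercised; the toy data and search parameters are illustrative, and the `o3.geometry`/`o3.utility` spellings assume a release where those submodules already exist. (Note that the plain string comparison on `o3.__version__` orders '0.10.0.0' before '0.8.0.0', so a release-aware version parse would be more robust.)

import numpy as np
import open3d as o3

pcd = o3.geometry.PointCloud()
pcd.points = o3.utility.Vector3dVector(np.random.rand(100, 3))
params = o3.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30)
estimate_normals(pcd, params)  # dispatches on o3.__version__
print(np.asarray(pcd.normals).shape)  # (100, 3)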
Example #9
def icp_registration(source, target, distance_threshold):
    pn.estimate_normals(source)
    pn.estimate_normals(target)
    icp_result = pn.registration_icp(source, target, distance_threshold,
                                     np.eye(4),
                                     pn.TransformationEstimationPointToPlane())
    return icp_result
Example #10
def prepare_source_and_target_rigid_3d(source_filename,
                                       noise_amp=0.001,
                                       n_random=500,
                                       orientation=np.deg2rad([0.0, 0.0,
                                                               30.0]),
                                       normals=False):
    source = o3.read_point_cloud(source_filename)
    source = o3.voxel_down_sample(source, voxel_size=0.005)
    print(source)
    target = copy.deepcopy(source)
    tp = np.asarray(target.points)
    rg = 1.5 * (tp.max(axis=0) - tp.min(axis=0))
    rands = (np.random.rand(n_random, 3) - 0.5) * rg + tp.mean(axis=0)
    target.points = o3.Vector3dVector(
        np.r_[tp + noise_amp * np.random.randn(*tp.shape), rands])
    ans = trans.euler_matrix(*orientation)
    target.transform(ans)
    if normals:
        o3.estimate_normals(source,
                            search_param=o3.geometry.KDTreeSearchParamHybrid(
                                radius=0.1, max_nn=50))
        o3.orient_normals_to_align_with_direction(source)
        o3.estimate_normals(target,
                            search_param=o3.geometry.KDTreeSearchParamHybrid(
                                radius=0.1, max_nn=50))
        o3.orient_normals_to_align_with_direction(target)
    return source, target
Example #11
def icp(A1, A2, weight=0, norm=0, detail=0):
    # align A1 onto A2
    loss = 1000
    o3d.estimate_normals(A2)
    norm0 = np.array(A2.normals)
    Trans0 = np.eye(4)
    i = 0
    while True:
        i += 1
        point1 = np.array(A1.points)
        point2 = np.array(A2.points)
        p1, p2, indice = point_matching(point1, point2)
        norm = norm0[indice]
        weight = np.linalg.norm((p1 - p2), axis=1)
        avg_weight = 0.45  # np.average(weight)
        weight = (avg_weight / (weight + avg_weight))[:, np.newaxis]
        newloss = icploss(p1, p2, weight)
        if (newloss > 1e-5) and (i < 80):
            loss = newloss
            if (i % 10 == 0) or (detail == 1):
                print("converging iter " + str(i) + ", now loss is " + str(newloss))
        else:
            print("converged: loss is " + str(newloss))
            break
        Trans = cal_transformation(p1, p2, weight)
        #Trans = cal_transformation_p2pl(p1, p2, normofp2=norm, weight=weight)
        A1.transform(Trans)
        Trans0 = Trans0.dot(Trans)
    print(Trans0)
    return A1, Trans0
Example #12
def o3d_estimate_normals(source):
    # downsample and estimate normals
    source = o3d.voxel_down_sample(source, voxel_size=2.5)
    o3d.estimate_normals(source, search_param=o3d.KDTreeSearchParamHybrid(
            radius=5, max_nn=30))

    return source
Example #13
def _get_features(cloud):
    o3.estimate_normals(
        cloud,
        o3.KDTreeSearchParamHybrid(radius=Param["normal_radius"],
                                   max_nn=Param["normal_max_nn"]))
    return o3.compute_fpfh_feature(
        cloud,
        o3.KDTreeSearchParamHybrid(radius=Param["feature_radius"],
                                   max_nn=Param["feature_max_nn"]))
Example #14
def draw_registration_result(source, target, transformation):
    source_temp = copy.deepcopy(source)
    target_temp = copy.deepcopy(target)
    open3d.estimate_normals(source_temp)
    open3d.estimate_normals(target_temp)
    source_temp.paint_uniform_color([1, 0.706, 0])
    target_temp.paint_uniform_color([0, 0.651, 0.929])
    source_temp.transform(transformation)
    open3d.draw_geometries([source_temp, target_temp])
Example #15
def update_nn_correspondences(correspondences, model, raw_pc,
                              max_nn_dist, nc_3d, visualize=False):
  w = 0.001
  pcd = o3d.geometry.PointCloud()
  print(raw_pc.shape)
  pcd.points = o3d.utility.Vector3dVector(raw_pc)
  o3d.estimate_normals(pcd)
  signs = np.sign(np.array(pcd.normals).dot(np.array([1.0,0,0])))
  pcd.normals = o3d.utility.Vector3dVector(signs[:, np.newaxis]*np.array(pcd.normals))
  #o3d.io.write_point_cloud('hey.ply', pcd)
  #dirc = np.array([[a*1.0,0,0] for a in range(1000)])/100.0
  #pcd0 = o3d.geometry.PointCloud()
  #pcd0.points = o3d.utility.Vector3dVector(dirc)
  #o3d.draw_geometries([pcd, pcd0])

  raw = np.concatenate([raw_pc, w*np.array(pcd.normals)], axis=1)
  mesh = o3d.geometry.TriangleMesh()
  mesh.vertices = o3d.utility.Vector3dVector(model.verts)
  mesh.triangles = o3d.utility.Vector3iVector(model.faces)
  mesh.compute_vertex_normals()
  model_v = np.concatenate([model.verts, w*np.array(mesh.vertex_normals)], axis=1)

  """
  Compute nearest neighbor correspondences
  from current mesh vertices to point cloud.
  """
  tree = NN(n_neighbors=1).fit(model_v)
  dists, indices = tree.kneighbors(raw)
  indices = indices[:, 0]
  idx = np.where(dists < max_nn_dist)[0]
  nnmask = idx
  indices = indices[idx]
  dists = np.abs(np.sum(np.multiply(raw_pc[idx, :]-model_v[indices, :3], model_v[indices, 3:]), axis=1))
  dists = np.square(dists)
  argmax = np.argmax(dists)
  nn_vertices = raw_pc[idx, :]
  #nn_vertices = model.verts[indices[idx], :]
  nn_vec = np.array(nn_vertices).reshape(-1)
  sigma2 = np.median(dists)
  #print('sigma2=%f' % sigma2)
  nn_weights=np.ones(dists.shape) # (np.ones(dists.shape)*sigma2/(np.ones(dists.shape)*sigma2+dists)) #np.exp(-dists/(2*sigma2))
  if visualize:
    #print(dists[argmax])
    #print(nn_weights[argmax])
    #import ipdb; ipdb.set_trace()
    visualize_points([model.verts, raw_pc, connection(raw_pc[idx[argmax], :], model_v[indices[argmax], :3])])

  indices3d = correspondences['indices3d'][:nc_3d]
  target3d = correspondences['target3d'][:nc_3d].reshape((-1, 3))
  weights3d = correspondences['weights3d'][:nc_3d]
  indices3d = np.concatenate([indices3d, indices], axis=0)
  target3d = np.concatenate([target3d, raw_pc], axis=0)
  weights3d = np.concatenate([weights3d, nn_weights], axis=0)
  correspondences['indices3d'] = indices3d
  correspondences['target3d'] = target3d
  correspondences['weights3d'] = weights3d
Example #16
    def __getitem__(self, index):
        fn = self.datapath[index]
        cls = self.classes[self.datapath[index][0]]
        point_set = np.loadtxt(fn[1]).astype(np.float32)
        # print("Origin Point size:", len(point_set))
        seg = np.loadtxt(fn[2]).astype(np.int32)

        point_set, seg = grid_subsampling(point_set,
                                          labels=seg,
                                          sampleDl=self.first_subsampling_dl)

        # Center and rescale point for 1m radius
        pmin = np.min(point_set, axis=0)
        pmax = np.max(point_set, axis=0)
        point_set -= (pmin + pmax) / 2
        scale = np.max(np.linalg.norm(point_set, axis=1))
        point_set *= 1.0 / scale

        if self.data_augmentation and self.split == 'train':
            theta = np.random.uniform(0, np.pi * 2)
            rotation_matrix = np.array([[np.cos(theta), -np.sin(theta)],
                                        [np.sin(theta),
                                         np.cos(theta)]])
            # TODO: why only rotate the x and z axis??
            point_set[:, [0, 2]] = point_set[:, [0, 2]].dot(
                rotation_matrix)  # random rotation
            point_set += np.random.normal(
                0, 0.001, size=point_set.shape)  # random jitter

        pcd = make_point_cloud(point_set)
        open3d.estimate_normals(pcd)
        normals = np.array(pcd.normals)

        if self.config.in_features_dim == 1:
            features = np.ones([point_set.shape[0], 1])
        elif self.config.in_features_dim == 4:
            features = np.ones([point_set.shape[0], 1])
            features = np.concatenate([features, point_set], axis=1)
        elif self.config.in_features_dim == 7:
            features = np.ones([point_set.shape[0], 1])
            features = np.concatenate([features, point_set, normals], axis=1)

        if self.classification:
            # manually convert numpy array to Tensor.
            # cls = torch.from_numpy(cls) - 1  # change to 0-based labels
            # cls = torch.from_numpy(np.array([cls]))
            # dict_inputs = segmentation_inputs(point_set, features, cls, self.config)
            # return dict_inputs
            return point_set, features, cls
        else:
            # manually convert numpy array to Tensor.
            # seg = torch.from_numpy(seg) - 1  # change to 0-based labels
            # dict_inputs = segmentation_inputs(point_set, features, seg, self.config)
            # return dict_inputs
            seg = seg - 1
            return point_set, features, seg
Example #17
def feature_(pcd):
    o3d.estimate_normals(
        pcd,
        o3d.KDTreeSearchParamHybrid(radius=param['radius_normal'],
                                    max_nn=param['maxnn_normal']))
    ft = o3d.compute_fpfh_feature(
        pcd,
        o3d.KDTreeSearchParamHybrid(radius=param['radius_feature'],
                                    max_nn=param['maxnn_feature']))
    return ft
Example #18
    def estimate_normals_open3d(self, neighborhood, search_radius):
        """ Estimate normals based on PCA, because we cannot vectorize we rely on the c++ open3d backend """
        pcd_normal = open3d.PointCloud()
        pcd_normal.points = open3d.Vector3dVector(copy.deepcopy(self.points))
        open3d.estimate_normals(
            pcd_normal,
            open3d.KDTreeSearchParamHybrid(radius=search_radius,
                                           max_nn=neighborhood))
        self.normals = numpy.asarray(pcd_normal.normals)

        return
Example #19
def _estimate_pointcloud_normals_unorganized(points):
    assert points.shape[1] == 3

    nonnan = ~np.isnan(points).any(axis=1)
    points_open3d = open3d.PointCloud()
    points_open3d.points = open3d.Vector3dVector(points[nonnan])
    open3d.estimate_normals(
        points_open3d,
        search_param=open3d.KDTreeSearchParamHybrid(radius=0.1, max_nn=30),
    )
    return np.asarray(points_open3d.normals)
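Note that the returned array contains one normal per non-NaN input point, so callers that need per-row alignment with the original `points` array must reapply the `nonnan` mask themselves.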
Example #20
def predict(color_input, color_bg, depth_input, depth_bg, camera_intrinsic):
    ## scale the images to the proper value
    color_input = np.asarray(color_input, dtype=np.float32) / 255
    color_bg = np.asarray(color_bg, dtype=np.float32) / 255
    depth_input = np.asarray(depth_input, dtype=np.float64) / 10000
    depth_bg = np.asarray(depth_bg, dtype=np.float64) / 10000

    ## get foreground mask
    frg_mask_color = ~(np.sum(abs(color_input-color_bg) < 0.3, axis=2) == 3)
    frg_mask_depth = np.logical_and((abs(depth_input-depth_bg) > 0.02), (depth_bg != 0))
    foreground_mask = np.logical_or(frg_mask_color, frg_mask_depth)

    ## project depth to camera space
    pix_x, pix_y = np.meshgrid(np.arange(640), np.arange(480))
    cam_x = (pix_x - camera_intrinsic[0][2]) * depth_input/camera_intrinsic[0][0]
    cam_y = (pix_y - camera_intrinsic[1][2]) * depth_input/camera_intrinsic[1][1]
    cam_z = depth_input

    depth_valid = (np.logical_and(foreground_mask, cam_z) != 0)
    input_points = np.array([cam_x[depth_valid], cam_y[depth_valid], cam_z[depth_valid]]).transpose()

    ## get the foreground point cloud
    pcd = open3d.PointCloud()
    pcd.points = open3d.Vector3dVector(input_points)
    open3d.estimate_normals(pcd, search_param=open3d.KDTreeSearchParamHybrid(radius = 0.1, max_nn = 50))
    pcd_normals = np.asarray(pcd.normals)

    ## flip normals to point towards sensor
    center = [0,0,0]
    for k in range(input_points.shape[0]):
        p1 = center - input_points[k][:]
        p2 = pcd_normals[k][:]
        x = np.cross(p1,p2)
        angle = np.arctan2(np.sqrt((x*x).sum()), p1.dot(p2.transpose()))
        if (angle > -np.pi/2 and angle < np.pi/2):
            pcd_normals[k][:] = -pcd_normals[k][:]

    ## reproject the normals back to image plane
    pix_x = np.round((input_points[:,0] * camera_intrinsic[0][0] / input_points[:,2] + camera_intrinsic[0][2]))
    pix_y = np.round((input_points[:,1] * camera_intrinsic[1][1] / input_points[:,2] + camera_intrinsic[1][2]))

    surface_normals_map = np.zeros(color_input.shape)
    for n, (x,y) in enumerate(zip(pix_x, pix_y)):
        x,y = int(x), int(y)
        surface_normals_map[y,x,0] = pcd_normals[n,0]
        surface_normals_map[y,x,1] = pcd_normals[n,1]
        surface_normals_map[y,x,2] = pcd_normals[n,2]
        
    ## Compute standard deviation of local normals
    mean_std_norms = np.mean(stdev_filter(surface_normals_map, 25), axis=2)
    affordance_map = 1 - mean_std_norms/mean_std_norms.max()
    affordance_map[~depth_valid] = 0

    return surface_normals_map, affordance_map
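The flipping loop measures the angle between the point-to-sensor vector and each estimated normal via arctan2(||p1 x p2||, p1 . p2) and negates the normals on one side of the pi/2 threshold; compare `open3d.orient_normals_towards_camera_location` in Examples #3 and #4, which orients normals relative to a camera position in a single call.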
Example #21
def registration(voxel_size, threshold, source_points, target_points,
                 trans_init):
    source, target, source_down, target_down, source_fpfh, target_fpfh = \
        prepare_dataset(voxel_size, source_points, target_points, trans_init)

    result_ransac = execute_global_registration(source_down, target_down,
                                                source_fpfh, target_fpfh,
                                                voxel_size)

    # perform ICP registration
    o3d.estimate_normals(source,
                         search_param=o3d.KDTreeSearchParamHybrid(radius=1,
                                                                  max_nn=30))
    o3d.estimate_normals(target,
                         search_param=o3d.KDTreeSearchParamHybrid(radius=1,
                                                                  max_nn=30))
    trans_init = result_ransac.transformation

    # print("Initial alignment")
    evaluation = o3d.registration.evaluate_registration(
        source, target, threshold, trans_init)
    # print(evaluation)

    # print("Apply point-to-point ICP")
    reg_p2p = o3d.registration.registration_icp(
        source, target, threshold, trans_init,
        o3d.registration.TransformationEstimationPointToPoint())
    # print(reg_p2p)
    # print("Transformation is:")
    # print(reg_p2p.transformation)
    # draw_registration_result(source, target, reg_p2p.transformation)

    # second ICP
    trans_init2 = reg_p2p.transformation
    # print("Apply 2nd point-to-point ICP")
    reg_p2p2 = o3d.registration.registration_icp(
        source, target, threshold, trans_init2,
        o3d.registration.TransformationEstimationPointToPoint())
    # draw_registration_result(source, target, reg_p2p2.transformation)

    # third ICP
    trans_init3 = reg_p2p2.transformation
    # print("Apply 3rd point-to-point ICP")
    reg_p2p3 = o3d.registration.registration_icp(
        source, target, threshold, trans_init3,
        o3d.registration.TransformationEstimationPointToPoint())
    # print('checking rms')
    # print(reg_p2p3.inlier_rmse)
    # print(reg_p2p3)
    # print("Transformation 3 is:")
    # print(reg_p2p3.transformation)
    # print("")
    # draw_registration_result(source, target, reg_p2p3.transformation)
    return reg_p2p3
Example #22
def setUp(self):
    pcd = o3.read_point_cloud('data/horse.ply')
    pcd = o3.voxel_down_sample(pcd, voxel_size=0.01)
    o3.estimate_normals(pcd,
                        search_param=o3.geometry.KDTreeSearchParamHybrid(
                            radius=0.01, max_nn=10))
    o3.orient_normals_to_align_with_direction(pcd)
    self._source = np.asarray(pcd.points)
    rot = trans.euler_matrix(*np.random.uniform(0.0, np.pi / 4, 3))
    self._tf = tf.RigidTransformation(rot[:3, :3], np.zeros(3))
    self._target = self._tf.transform(self._source)
    self._target_normals = np.asarray(np.dot(pcd.normals, self._tf.rot.T))
Example #23
def visualize_trajectory(numDirs=8, dists=[.1, .3, .5], steps=10):

    dino = DinoAgent()
    center = dino.center 
    print("dino is located at: " + str(center))
    offset_vals = [-.1, .1]
    offsets = [np.array([i, j, k]) for i in offset_vals for j in offset_vals for k in offset_vals] + [np.array([0, 0, 0])]
    centers = [np.array(center) + offset for offset in offsets]


    ptcloud, sc, start_view = dino.visualize_ptcloud('greedy', 80)
    print("start view is: " + str(start_view))

    params = getCandidateParams(start_view, numDirs, centers, dists)
    print("params: " + str(params))
    cand_view_list = [dirToView(d, start_view, c, dist) for (d, c, dist) in params]

    cand_traj_pts = cand_view_list
    for (d, c, dist) in params:
        print("param: " + str((d, c, dist)))
        int_pts = interpolateTarget(start_view, d, steps, dist, c)
        pts_list = [int_pts.get() for i in range(int_pts.qsize())]
        print("points: " + str(pts_list))
        cand_traj_pts += pts_list

    (greedy_dir, greedy_c, greedy_dist) = chooseGreedyAction(start_view, dists, numDirs, sc, centers)
    print("greedy params: d {}, c {}, dist {}".format(greedy_dir, greedy_c, greedy_dist))
    greedy_traj = interpolateTarget(start_view, greedy_dir, steps, greedy_dist, greedy_c)
    greedy_pts_list = [greedy_traj.get() for i in range(greedy_traj.qsize())]

    pcd = open3d.geometry.PointCloud()
    pcd.points = open3d.Vector3dVector(ptcloud[:, 0:3])
    pcd.paint_uniform_color([1,0.706,0])

    total_ptcloud = np.vstack((ptcloud[:, 0:3], np.array(cand_traj_pts)[:, 0:3]))
    tpcd = open3d.geometry.PointCloud()
    tpcd.points = open3d.Vector3dVector(total_ptcloud)
    open3d.estimate_normals(tpcd, search_param = open3d.KDTreeSearchParamHybrid(radius = 0.1, max_nn = 100))
    open3d.orient_normals_to_align_with_direction(tpcd)
    tpcd.paint_uniform_color([1,0.706,0])

    greedy_pcd = open3d.geometry.PointCloud()
    greedy_pcd.points = open3d.Vector3dVector(np.array(greedy_pts_list)[:, 0:3])
    greedy_pcd.paint_uniform_color([0, 1, 1])

    traj_pcd = open3d.geometry.PointCloud()
    
    traj_pcd.points = open3d.Vector3dVector(np.array(cand_traj_pts)[:, 0:3])
    traj_pcd.paint_uniform_color([1,0.706,0])
    open3d.visualization.draw_geometries([pcd, greedy_pcd])

    open3d.write_point_cloud("trajectory_viz.ply", tpcd)
Example #24
def load_pcds(path, downsample=True, interval=1):
    """
    load pointcloud by path and down samle (if True) based on voxel_size 

    """

    global voxel_size, camera_intrinsics, plane_equation
    pcds = []

    for Filename in trange(
            len(glob.glob1(path + "JPEGImages", "*.jpg")) // interval):
        img_file = path + 'JPEGImages/%s.jpg' % (Filename * interval)

        cad = cv2.imread(img_file)
        cad = cv2.cvtColor(cad, cv2.COLOR_BGR2RGB)
        depth_file = path + 'depth/%s.png' % (Filename * interval)
        reader = png.Reader(depth_file)
        pngdata = reader.read()
        depth = np.array(list(map(np.uint16, pngdata[2])))
        mask = depth.copy()
        depth = convert_depth_frame_to_pointcloud(depth, camera_intrinsics)

        aruco_center = get_aruco_center(cad, depth)
        # remove plane and anything underneath the plane from the pointcloud
        sol = findplane(cad, depth)
        distance = point_to_plane(depth, sol)
        sol = fitplane(sol, depth[(distance > -0.01) & (distance < 0.01)])
        # record the plane equation on the first frame for point projections
        if Filename == 0:
            plane_equation = sol
        distance = point_to_plane(depth, sol)
        mask[distance < 0.002] = 0

        # use statistical outlier remover to remove isolated noise from the scene
        distance2center = np.linalg.norm(depth - aruco_center, axis=2)
        mask[distance2center > MAX_RADIUS] = 0
        source = open3d.PointCloud()
        source.points = open3d.Vector3dVector(depth[mask > 0])
        source.colors = open3d.Vector3dVector(cad[mask > 0])

        cl, ind = open3d.statistical_outlier_removal(source,
                                                     nb_neighbors=500,
                                                     std_ratio=0.5)

        if downsample:
            pcd_down = open3d.voxel_down_sample(cl, voxel_size=voxel_size)
            open3d.estimate_normals(
                pcd_down, open3d.KDTreeSearchParamHybrid(radius=0.002 * 2,
                                                         max_nn=30))
            pcds.append(pcd_down)
        else:
            pcds.append(cl)
    return pcds
Example #25
def depth_image_to_pointcloud(img, filter=[None, None, None], estimate_normals=True):
    depth_image_preprocessed = preprocess_image(img)
    mat = depth_image_to_scientific_coordinates(depth_image_preprocessed, flatten=True, filter=filter)
    if mat.shape[0] == 0:
        print('No points left after filtering.')
        return None
    mat = scientific_coordinates_to_standard(mat)
    print_pointcloud_mat_stats(mat)
    pcd = open3d.PointCloud()
    pcd.points = open3d.Vector3dVector(mat)
    if estimate_normals:
        open3d.estimate_normals(pcd, search_param=open3d.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
    return pcd
Example #26
def preprocess_point_cloud(pcd, voxel_size):
    pn.estimate_normals(pcd)

    pcd_down = pn.voxel_down_sample(pcd, voxel_size)

    radius_normal = voxel_size * 2
    pn.estimate_normals(
        pcd_down, pn.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=10))

    radius_feature = voxel_size * 5
    pcd_fpfh = pn.compute_fpfh_feature(
        pcd_down, pn.KDTreeSearchParamHybrid(radius=radius_feature, max_nn=50))
    return pcd_down, pcd_fpfh
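Normals are estimated twice here: once on the full cloud with default search parameters and again on the downsampled cloud with a radius tied to the voxel size; only the latter feeds the FPFH computation.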
Example #27
def icp_reweighted(source,
                   target,
                   sigma=0.01,
                   max_iter=100,
                   stopping_threshold=1e-4):
    """ If target has no normals, estimate """
    if np.array(target.normals).shape[0] == 0:
        search_param = o3d.geometry.KDTreeSearchParamHybrid(radius=0.2,
                                                            max_nn=30)
        o3d.estimate_normals(target, search_param=search_param)

    tree = NN(n_neighbors=1, algorithm='kd_tree', n_jobs=10)
    tree = tree.fit(np.array(target.points))
    n = np.array(source.points).shape[0]
    normals = np.array(target.normals)
    points = np.array(target.points)
    weights = np.zeros(n)
    errors = []
    transform = np.eye(4)

    for itr in range(max_iter):
        p = np.array(source.points)
        R, trans = gutil.unpack(transform)
        p = (R.dot(p.T) + trans.reshape((3, 1))).T
        _, indices = tree.kneighbors(p)
        """ (r X pi + pi + t - qi)^T ni """
        """( <r, (pi X ni)> + <t, ni> + <pi-qi, ni> )^2"""
        """ (<(r; t), hi> + di)^2 """
        nor = normals[indices[:, 0], :]
        q = points[indices[:, 0], :]
        d = np.sum(np.multiply(p - q, nor), axis=1)  #[n]
        h = np.zeros((n, 6))
        h[:, :3] = np.cross(p, nor)
        h[:, 3:] = nor
        weight = (sigma**2) / (np.square(d) + sigma**2)
        H = np.multiply(h.T, weight).dot(h)
        g = -h.T.dot(np.multiply(d, weight))
        delta = np.linalg.solve(H, g)
        errors = np.abs(d)
        print('iter=%d, delta=%f, mean error=%f, median error=%f' %
              (itr, np.linalg.norm(delta,
                                   2), np.mean(errors), np.median(errors)))
        if np.linalg.norm(delta, 2) < stopping_threshold:
            break
        trans = delta[3:]
        R = gutil.rodrigues(delta[:3])
        T = gutil.pack(R, trans)
        transform = T.dot(transform)

    return transform
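The triple-quoted strings inside the loop sketch the derivation: the point-to-plane residual (R p_i + t - q_i)^T n_i is linearized for a small rotation vector r as <r, p_i x n_i> + <t, n_i> + <p_i - q_i, n_i>, which is the 6-vector h_i = (p_i x n_i; n_i) assembled into `h`, and each squared residual d_i^2 is reweighted by sigma^2 / (d_i^2 + sigma^2) so that outliers are progressively down-weighted.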
Example #28
def load_pcd(data_path, cat):
    # load meshes
    ply_path = os.path.join(data_path, 'meshes', 'obj_' + cat + '.ply')
    model_vsd = ply_loader.load_ply(ply_path)
    pcd_model = open3d.PointCloud()
    pcd_model.points = open3d.Vector3dVector(model_vsd['pts'])
    open3d.estimate_normals(pcd_model,
                            search_param=open3d.KDTreeSearchParamHybrid(
                                radius=0.1, max_nn=30))
    # open3d.draw_geometries([pcd_model])
    model_vsd_mm = copy.deepcopy(model_vsd)
    model_vsd_mm['pts'] = model_vsd_mm['pts'] * 1000.0
    #pcd_model = open3d.read_point_cloud(ply_path)

    return pcd_model, model_vsd, model_vsd_mm
Example #29
def preprocess_point_cloud(pcd, voxel_size):
    print(":: Downsample with a voxel size %.3f." % voxel_size)
    pcd_down = op3.voxel_down_sample(pcd, voxel_size)

    radius_normal = voxel_size * 2
    print(":: Estimate normal with search radius %.3f." % radius_normal)
    op3.estimate_normals(
        pcd_down, op3.KDTreeSearchParamHybrid(radius=radius_normal, max_nn=30))

    radius_feature = voxel_size * 5
    print(":: Compute FPFH feature with search radius %.3f." % radius_feature)
    pcd_fpfh = op3.compute_fpfh_feature(
        pcd_down, op3.KDTreeSearchParamHybrid(radius=radius_feature,
                                              max_nn=100))
    return pcd_down, pcd_fpfh
Example #30
def merge_pointclouds(pcd1, pcd2, keep_colors=True, estimate_normals=True):
    if pcd1 is None and pcd2 is None:
        return None
    if pcd1 is None:
        return pcd2
    if pcd2 is None:
        return pcd1

    pcd = open3d.PointCloud()
    pcd.points = open3d.Vector3dVector(np.vstack((np.asarray(pcd1.points), np.asarray(pcd2.points))))
    if keep_colors:
        pcd.colors = open3d.Vector3dVector(np.vstack((np.asarray(pcd1.colors), np.asarray(pcd2.colors))))
    if estimate_normals:
        open3d.estimate_normals(pcd, search_param=open3d.KDTreeSearchParamHybrid(radius=0.1, max_nn=30))
    return pcd