Code Example #1
    def setUp(self):
        self.useLocal = False

        if self.useLocal:
            self.tempdir = tempdir = '.'
        else:
            self.tempdir = tempdir = mkdtemp(prefix='patty-analytics')

        self.drivemapLas = os.path.join(tempdir, 'testDriveMap.las')
        self.sourcelas = os.path.join(tempdir, 'testSource.las')
        self.footprint_csv = os.path.join(tempdir, 'testFootprint.csv')
        self.foutlas = os.path.join(tempdir, 'testOutput.las')

        self.min = -10
        self.max = 10
        self.num_rows = 1000

        # Create plane with a pyramid
        dm_pct = 0.5
        dm_rows = np.round(self.num_rows * dm_pct)
        dm_min = self.min * dm_pct
        dm_max = self.max * dm_pct

        delta = dm_max / dm_rows
        shape_side = dm_max - dm_min

        dm_offset = [0, 0, 0]
        self.dense_obj_offset = [3, 2, -(1 + shape_side / 2)]

        # make drivemap
        plane_row = np.linspace(start=self.min,
                                stop=self.max,
                                num=self.num_rows)
        plane_points = cartesian((plane_row, plane_row, [0]))

        shape_points, footprint = make_tri_pyramid_with_base(
            shape_side, delta, dm_offset)
        np.savetxt(self.footprint_csv, footprint, fmt='%.3f', delimiter=',')

        dm_points = np.vstack([plane_points, shape_points])
        plane_grid = np.zeros((dm_points.shape[0], 6), dtype=np.float32)
        plane_grid[:, 0:3] = dm_points

        self.drivemap_pc = pcl.PointCloudXYZRGB(plane_grid)
        self.drivemap_pc = downsample_voxel(self.drivemap_pc,
                                            voxel_size=delta * 20)
        # utils.set_registration(self.drivemap_pc)
        utils.save(self.drivemap_pc, self.drivemapLas)

        # Create a simple pyramid
        dense_grid = np.zeros((shape_points.shape[0], 6), dtype=np.float32)
        dense_grid[:, 0:3] = shape_points + self.dense_obj_offset

        self.source_pc = pcl.PointCloudXYZRGB(dense_grid)
        self.source_pc = downsample_voxel(self.source_pc, voxel_size=delta * 5)
        utils.save(self.source_pc, self.sourcelas)
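
The setUp above builds a synthetic scene: a flat plane of grid points with a small pyramid placed on it, saved as LAS files for the registration tests. The grid of plane points comes from the cartesian helper, which, judging by its use here, expands 1-D coordinate arrays into an (N, 3) array of points. Below is a minimal NumPy sketch of such a helper under that assumption; cartesian_product and row are illustrative names, not part of the project.

import numpy as np

def cartesian_product(arrays):
    # Cartesian product of 1-D array-likes, returned as an (N, len(arrays)) array.
    grids = np.meshgrid(*arrays, indexing='ij')
    return np.stack([g.ravel() for g in grids], axis=-1)

# Example: a coarse 5x5 grid of (x, y, 0) points, analogous to plane_points above.
row = np.linspace(start=-10, stop=10, num=5)
points = cartesian_product((row, row, [0.0]))   # shape (25, 3)
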
Code Example #2
File: registration.py  Project: NLeSC/PattyAnalytics
def _fine_registration_helper(pointcloud, drivemap, voxelsize=0.05, attempt=0):
    """
    Perform ICP on pointcloud with drivemap, and return a convergence indicator.
    Reject large translations.

    Returns:
        transf : np.array([4,4])
            transform
        success : Boolean
            if icp was successful
        fitness : float
            roughly a sum of squared differences, i.e. smaller is better
    """
    ####
    # Downsample to speed up
    # use voxel filter to keep evenly distributed spatial extent

    log(" - Downsampling with voxel filter: %s" % voxelsize)
    pc = downsample_voxel(pointcloud, voxelsize)

    ####
    # Clip to drivemap to prevent outliers confusing the ICP algorithm

    log(" - Clipping to drivemap")
    bb = BoundingBox(drivemap)
    z = bb.center[2]
    extracted = extract_mask(pc, [bb.contains([point[0], point[1], z])
                                  for point in pc])

    log(" - Remaining points: %s" % len(extracted))

    ####
    # GICP

    converged, transf, estimate, fitness = gicp(extracted, drivemap)

    ####
    # Don't accept large translations

    translation = transf[0:3, 3]
    if np.dot(translation, translation) > 5 ** 2:
        log(" - Translation too large, considering it a failure.")
        converged = False
        fitness = 1e30
    else:
        log(" - Success, fitness: ", converged, fitness)

    force_srs(estimate, same_as=pointcloud)
    save(estimate, "attempt%s.las" % attempt)

    return transf, converged, fitness
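
The helper returns the estimated transform together with a convergence flag and a fitness score, so a caller can run it several times and keep the best result. A hypothetical driver loop under that reading (pointcloud and drivemap are assumed to be point clouds loaded elsewhere; the loop itself is not part of the project):

best_transf, best_fitness = None, 1e30
for attempt, voxelsize in enumerate([0.05, 0.1, 0.2]):
    transf, converged, fitness = _fine_registration_helper(
        pointcloud, drivemap, voxelsize=voxelsize, attempt=attempt)
    # Keep the transform with the lowest fitness among converged attempts.
    if converged and fitness < best_fitness:
        best_transf, best_fitness = transf, fitness
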
Code Example #3
File: boundary.py  Project: NLeSC/PattyAnalytics
def boundary_of_center_object(pc,
                              downsample=None,
                              angle_threshold=0.1,
                              search_radius=0.1,
                              normal_search_radius=0.1):
    """
    Find the boundary of the main object.
    First applies dbscan to find the main object,
    then estimates its footprint by taking the pointcloud boundary.
    Resulting pointcloud has the same SRS and offset as the input.

    Arguments:
        pc : pcl.PointCloud

        downsample : If given, randomly downsample the pointcloud to this
                     fraction of its original size; values should be in [0, 1]

        angle_threshold : float, defaults to 0.1

        search_radius : float, defaults to 0.1

        normal_search_radius : float, defaults to 0.1

    Returns:
        boundary : pcl.PointCloud
    """

    if downsample is not None:
        log(' - Random downsampling factor:', downsample)
        pc = utils.downsample_random(pc, downsample)
    else:
        log(' - Not downsampling')

    # Main noise suppression step
    # Find largest clusters, accounting for at least 70% of the pointcloud.
    # Presumably, this is the main object.
    log(' - Starting dbscan on downsampled pointcloud')
    mainobject = get_largest_dbscan_clusters(pc, 0.7, .075, 250)
    save(mainobject, 'mainobject.las')

    boundary = estimate_boundaries(mainobject,
                                   angle_threshold=angle_threshold,
                                   search_radius=search_radius,
                                   normal_search_radius=normal_search_radius)

    boundary = extract_mask(mainobject, boundary)

    if len(boundary) == len(mainobject) or len(boundary) == 0:
        log(' - Cannot find boundary')
        return None

    # Project onto the xy plane, take the 2nd percentile height
    points = np.asarray(boundary)
    points[:, 2] = np.percentile(points[:, 2], 2)

    # Secondary noise suppression step
    # Some cases have multiple objects close to each other, and we need to
    # filter some out. Assume the object is a single item; perform another
    # dbscan to select the footprint of the main item.
    log(' - Starting dbscan on boundary')
    mainboundary = get_largest_dbscan_clusters(boundary, 0.5, .1, 10)

    # Evenly space out the points
    mainboundary = utils.downsample_voxel(mainboundary, voxel_size=0.1)

    utils.force_srs(mainboundary, same_as=pc)

    return mainboundary
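
boundary_of_center_object returns the footprint as a point cloud, or None when no boundary can be isolated, so the result can be dumped to CSV in the same format as the test fixture above. A hypothetical usage sketch, assuming pc is a point cloud loaded elsewhere:

footprint = boundary_of_center_object(pc, downsample=0.2)
if footprint is not None:
    # np.asarray works on the returned point cloud, as used inside the function.
    np.savetxt('footprint.csv', np.asarray(footprint), fmt='%.3f', delimiter=',')
else:
    log(' - No footprint found')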