def setUp(self):
        self.useLocal = False

        if self.useLocal:
            self.tempdir = tempdir = '.'
        else:
            self.tempdir = tempdir = mkdtemp(prefix='patty-analytics')

        self.drivemapLas = os.path.join(tempdir, 'testDriveMap.las')
        self.sourcelas = os.path.join(tempdir, 'testSource.las')
        self.footprint_csv = os.path.join(tempdir, 'testFootprint.csv')
        self.foutlas = os.path.join(tempdir, 'testOutput.las')

        self.min = -10
        self.max = 10
        self.num_rows = 1000

        # Create plane with a pyramid
        dm_pct = 0.5
        dm_rows = np.round(self.num_rows * dm_pct)
        dm_min = self.min * dm_pct
        dm_max = self.max * dm_pct

        delta = dm_max / dm_rows
        shape_side = dm_max - dm_min

        dm_offset = [0, 0, 0]
        self.dense_obj_offset = [3, 2, -(1 + shape_side / 2)]

        # make drivemap
        plane_row = np.linspace(start=self.min,
                                stop=self.max,
                                num=self.num_rows)
        plane_points = cartesian((plane_row, plane_row, [0]))

        shape_points, footprint = make_tri_pyramid_with_base(
            shape_side, delta, dm_offset)
        np.savetxt(self.footprint_csv, footprint, fmt='%.3f', delimiter=',')

        dm_points = np.vstack([plane_points, shape_points])
        plane_grid = np.zeros((dm_points.shape[0], 6), dtype=np.float32)
        plane_grid[:, 0:3] = dm_points

        self.drivemap_pc = pcl.PointCloudXYZRGB(plane_grid)
        self.drivemap_pc = downsample_voxel(self.drivemap_pc,
                                            voxel_size=delta * 20)
        # utils.set_registration(self.drivemap_pc)
        utils.save(self.drivemap_pc, self.drivemapLas)

        # Create a simple pyramid
        dense_grid = np.zeros((shape_points.shape[0], 6), dtype=np.float32)
        dense_grid[:, 0:3] = shape_points + self.dense_obj_offset

        self.source_pc = pcl.PointCloudXYZRGB(dense_grid)
        self.source_pc = downsample_voxel(self.source_pc, voxel_size=delta * 5)
        utils.save(self.source_pc, self.sourcelas)
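For reference, `cartesian` above is assumed to build the cartesian product of the given coordinate arrays (here: an XY grid at z=0). A minimal sketch of that behaviour, not the project's actual helper:

import numpy as np

def cartesian_sketch(arrays):
    # Cartesian product of 1-D arrays, one row per combination.
    grids = np.meshgrid(*arrays, indexing='ij')
    return np.stack([g.ravel() for g in grids], axis=-1)

# cartesian_sketch(([0, 1], [0, 1], [0])) -> the 4 points of a 2x2 XY grid at z=0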
Example #2
def _fine_registration_helper(pointcloud, drivemap, voxelsize=0.05, attempt=0):
    """
    Perform ICP on pointcloud with drivemap, and return convergence indicator.
    Reject large translations.

    Returns:
        transf : np.array([4,4])
            transformation matrix
        success : Boolean
            whether ICP was successful
        fitness : float
            sum-of-squared-differences-like measure, i.e. smaller is better
    """
    ####
    # Downsample to speed up
    # use voxel filter to keep evenly distributed spatial extent

    log(" - Downsampling with voxel filter: %s" % voxelsize)
    pc = downsample_voxel(pointcloud, voxelsize)

    ####
    # Clip to drivemap to prevent outliers confusing the ICP algorithm

    log(" - Clipping to drivemap")
    bb = BoundingBox(drivemap)
    z = bb.center[2]
    extracted = extract_mask(pc, [bb.contains([point[0], point[1], z])
                                  for point in pc])

    log(" - Remaining points: %s" % len(extracted))

    ####
    # GICP

    converged, transf, estimate, fitness = gicp(extracted, drivemap)

    ####
    # Don't accept large translations

    translation = transf[0:3, 3]
    if np.dot(translation, translation) > 5 ** 2:
        log(" - Translation too large, considering it a failure.")
        converged = False
        fitness = 1e30
    else:
        log(" - Success, fitness: ", converged, fitness)

    force_srs(estimate, same_as=pointcloud)
    save(estimate, "attempt%s.las" % attempt)

    return transf, converged, fitness
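A hedged usage sketch; the `attempt` argument suggests callers retry with different settings, but this retry policy is an assumption, not the project's actual driver code:

# Assumed pattern: retry with a coarser voxel grid if the first ICP run fails.
transf, ok, fitness = _fine_registration_helper(pc, drivemap, voxelsize=0.05, attempt=0)
if not ok:
    transf, ok, fitness = _fine_registration_helper(pc, drivemap, voxelsize=0.10, attempt=1)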
Example #3
def test_auto_file_format():
    """Test saving and loading pointclouds via the pcl loader"""

    # make and save a pointcloud
    pc = pcl.PointCloud(10)
    pc_arr = np.asarray(pc)
    pc_arr[:] = np.random.randn(*pc_arr.shape)

    with NamedTemporaryFile(suffix=".ply") as f:
        utils.save(pc, f.name)
        pc2 = utils.load(f.name)
        _compare(pc, pc2)

    with NamedTemporaryFile(suffix=".pcd") as f:
        utils.save(pc, f.name)
        pc2 = utils.load(f.name)
        _compare(pc, pc2)

    with NamedTemporaryFile(suffix=".las") as f:
        utils.save(pc, f.name, format="PLY")
        pc2 = utils.load(f.name, format="PLY")
        _compare(pc, pc2)

    with NamedTemporaryFile(suffix=".las") as f:
        utils.save(pc, f.name, format="PCD")
        pc2 = utils.load(f.name, format="PCD")
        _compare(pc, pc2)
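The test relies on utils.save and utils.load inferring the file format from the extension unless format= is given explicitly. A minimal sketch of that kind of dispatch, not patty's actual implementation:

import os

def guess_format(path, fmt=None):
    # An explicit format wins; otherwise map the extension to a format name.
    if fmt is not None:
        return fmt
    ext = os.path.splitext(path)[1].lower()
    return {'.ply': 'PLY', '.pcd': 'PCD', '.las': 'LAS'}[ext]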
Example #4
def test_read_write():
    """ Test read and write LAS files functionality"""
    filename = "./testIO.las"

    # make and save a pointcloud
    pc1 = pcl.PointCloud(10)
    pc1_arr = np.asarray(pc1)
    pc1_arr[:] = np.random.randn(*pc1_arr.shape)
    utils.save(pc1, filename)

    # reload it
    pc2 = utils.load(filename)

    _compare(pc1, pc2)

    os.remove(filename)
Example #5
#!/usr/bin/env python
"""Segment points by colour from a pointcloud file and saves all reddish points
target pointcloud file. Autodectects ply, pcd and las files.

Usage: redstickdetection.py  [-h] <infile> <outfile>
"""

from docopt import docopt
from patty.segmentation.segRedStick import get_red_mask
from patty.utils import extract_mask, load, save

if __name__ == '__main__':
    args = docopt(__doc__)

    pc = load(args['<infile>'])
    red_pc = extract_mask(pc, get_red_mask(pc))
    save(red_pc, args['<outfile>'])
Example #6
import sys

from docopt import docopt
from patty.segmentation import segment_dbscan
from patty.utils import load, save

if __name__ == '__main__':
    args = docopt(__doc__, sys.argv[1:])

    rgb_weight = float(args['--rgb_weight'])
    eps = float(args['<epsilon>'])
    minpoints = int(args['<minpoints>'])

    # Kludge to get a proper exception for file not found
    # (PCL will report "problem parsing header!").
    with open(args['<file>']) as _:
        pc = load(args['<file>'])
    print("%d points" % len(pc))

    clusters = segment_dbscan(pc, epsilon=eps, minpoints=minpoints,
                              rgb_weight=rgb_weight)

    n_outliers = len(pc)
    for i, cluster in enumerate(clusters):
        print("%d points in cluster %d" % (len(cluster), i))
        filename = '%s/cluster%d.%s' % (args['--output_dir'], i,
                                        args['--format'])
        save(cluster, filename)
        n_outliers -= len(cluster)

    print("%d outliers" % n_outliers)
Example #7
#!/usr/bin/env python
"""Convert a pointcloud file from one format to another.

This procedure loses any information about the point normals and the
geographic projection used. It keeps color information. It recognises
PLY, PCD and LAS files.

Usage:
  convert.py [-h] <infile> <outfile>
"""

from patty.utils import load, save
from docopt import docopt

if __name__ == '__main__':
    args = docopt(__doc__)

    pc = load(args['<infile>'])
    save(pc, args['<outfile>'])
Example #8
if __name__ == '__main__':
    args = docopt(__doc__)

    pc = load(args['<source>'])

    try:
        offset = csv_read(args['-o'])
    except Exception:
        offset = None  # no offset file supplied; fall back to the default origin

    try:
        matrix = csv_read(args['-r'])
        pc.rotate(matrix, origin=offset)
    except Exception as e:
        print('Problem with rotate: ', e)

    try:
        factor = csv_read(args['-s'])
        pc.scale(factor, origin=offset)
    except Exception as e:
        print('Problem with scale: ', e)

    try:
        vector = csv_read(args['-t'])
        pc.translate(vector)
    except Exception as e:
        print('Problem with translate: ', e)

    save(pc, args['<target>'])
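For illustration, the kind of CSV inputs assumed by the flags above (shapes inferred from the rotate/scale/translate calls; the file names are hypothetical):

import numpy as np

np.savetxt('rotation.csv', np.eye(3), delimiter=',')             # -r: 3x3 rotation matrix
np.savetxt('scale.csv', [1.5], delimiter=',')                    # -s: scale factor
np.savetxt('translation.csv', [[1.0, 2.0, 0.0]], delimiter=',')  # -t: translation vector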
Example #9
#!/usr/bin/env python
"""Apply a statistical outlier filter to a pointcloud.

Usage:
    statfilter.py [-k <kmeans>] [-s <stddev>] <infile> <outfile>

Options:
    -k <kmeans>, --kmeans <kmeans>      Mean-k: number of neighbours used to
                                        estimate mean distances [default: 1000].
    -s <stddev>, --stddev <stddev>      Standard deviation cut-off
                                        [default: 2.0].
"""

from docopt import docopt
from patty.utils import load, save


def statfilter(pc, k, s):
    """Apply the PCL statistical outlier filter to a point cloud"""
    fil = pc.make_statistical_outlier_filter()
    fil.set_mean_k(k)
    fil.set_std_dev_mul_thresh(s)
    return fil.filter()


if __name__ == "__main__":
    args = docopt(__doc__)

    pc = load(args['<infile>'])
    filtered = statfilter(pc, int(args['--kmeans']), float(args['--stddev']))
    save(filtered, args['<outfile>'])
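A quick hedged check of the filter on a synthetic cloud, assuming python-pcl is installed; the parameter values are illustrative:

import numpy as np
import pcl

noisy = pcl.PointCloud(np.random.randn(1000, 3).astype(np.float32))
cleaned = statfilter(noisy, k=50, s=1.0)
print("%d -> %d points" % (len(noisy), len(cleaned)))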
Example #10
def coarse_registration(pointcloud, drivemap, footprint, downsample=None):
    """
    Improve the initial registration.
    Find the proper scale by looking for the red meter sticks, and calculate
    and align the pointcloud's footprint.

    Arguments:
        pointcloud: pcl.PointCloud
                    The high-res object to register.

        drivemap:   pcl.PointCloud
                    A small part of the low-res drivemap on which to register

        footprint:  pcl.PointCloud
                    Pointcloud containing the object's footprint

        downsample: float, default=None (no downsampling)
                    Downsample the high-res pointcloud before footprint
                    calculation.
    """
    log("Starting coarse registration")

    ###
    # find redstick scale, and use it if possible
    log(" - Redstick scaling")

    allow_scaling = True

    scale, confidence = get_stick_scale(pointcloud)
    log(" - Red stick scale=%s confidence=%s" % (scale, confidence))

    if confidence > 0.5:
        log(" - Applying red stick scale")
        pointcloud.scale(1.0 / scale)  # don't care about the origin of scaling
        allow_scaling = False
    else:
        log(" - Not applying red stick scale, confidence too low")

    #####
    # find all the points in the drivemap along the footprint
    # use bottom two meters of drivemap (not trees)

    dm_boundary = boundary_of_drivemap(drivemap, footprint)
    dm_bb = BoundingBox(dm_boundary)

    # set footprint height from the minimum value, as trees or other high
    # objects make pc.center() too high
    fixed_boundary = clone(footprint)
    fp_array = np.asarray(fixed_boundary)
    fp_array[:, 2] = dm_bb.min[2]

    save(fixed_boundary, "fixed_bound.las")

    #####
    # find all the boundary points of the pointcloud

    loose_boundary = boundary_of_center_object(pointcloud, downsample)
    if loose_boundary is None:
        log(" - boundary estimation failed, using lowest 30 percent of points")
        loose_boundary = boundary_of_lowest_points(pointcloud,
                                                   height_fraction=0.3)

    ####
    # match the pointcloud boundary with the footprint boundary

    log(" - Aligning footprints:")
    rot_matrix, rot_center, scale, translation = align_footprints(
        loose_boundary, fixed_boundary,
        allow_scaling=allow_scaling,
        allow_rotation=True,
        allow_translation=True)

    save(loose_boundary, "aligned_bound.las")

    ####
    # Apply to the main pointcloud

    pointcloud.rotate(rot_matrix, origin=rot_center)
    pointcloud.scale(scale, origin=rot_center)
    pointcloud.translate(translation)
    rot_center += translation

    return rot_center
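The order above matters: rotate and scale both use rot_center as origin, then the translation moves the cloud and the returned center alike. A numpy sketch of that composition, assuming rotate/scale about an origin behave as shown (an assumption about patty's PointCloud methods):

import numpy as np

pts = np.random.rand(10, 3)
rot = np.eye(3)                       # stand-in rotation matrix
center = pts.mean(axis=0)
scale_f, t = 1.2, np.array([5.0, 0.0, 0.0])

# rotate about center, scale about center, then translate
out = (pts - center).dot(rot.T) * scale_f + center + t
new_center = center + t               # mirrors 'rot_center += translation'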
Example #11
    log("Reading drivemap", drivemapfile)
    drivemap = load(drivemapfile)
    force_srs(drivemap, srs="EPSG:32633")

    log("Reading footprint", footprintcsv)
    footprint = load(footprintcsv)
    force_srs(footprint, srs="EPSG:32633")
    set_srs(footprint, same_as=drivemap)

    log("Reading object", sourcefile)
    pointcloud = load(sourcefile)

    Up = None
    try:
        with open(up_file) as f:
            dic = json.load(f)
        Up = np.array(dic['estimatedUpDirection'])
        log("Reading up_file", up_file)
    except Exception:
        log("Cannot parse up_file, skipping")

    initial_registration(pointcloud, Up, drivemap,
                         trust_up=Trust_up, initial_scale=Initial_scale)
    save(pointcloud, "initial.las")
    center = coarse_registration(pointcloud, drivemap, footprint, Downsample)
    save(pointcloud, "coarse.las")
    fine_registration(pointcloud, drivemap, center, voxelsize=Voxel)

    save(pointcloud, foutLas)