def test_auto_file_format():
    """Test saving and loading pointclouds via the pcl loader"""

    # build a random 10-point cloud to round-trip through the loaders
    cloud = pcl.PointCloud(10)
    arr = np.asarray(cloud)
    arr[:] = np.random.randn(*arr.shape)

    def roundtrip(suffix, **kwargs):
        # save under a temporary name, reload, and compare with the
        # original cloud; kwargs forwards an explicit format= override
        with NamedTemporaryFile(suffix=suffix) as tmp:
            utils.save(cloud, tmp.name, **kwargs)
            reloaded = utils.load(tmp.name, **kwargs)
            _compare(cloud, reloaded)

    # format inferred from the extension
    roundtrip('.ply')
    roundtrip('.pcd')
    # extension says .las, but an explicit format= wins
    roundtrip('.las', format="PLY")
    roundtrip('.las', format="PCD")
Example #2
0
def test_auto_file_format():
    """Test saving and loading pointclouds via the pcl loader"""

    # make and save a pointcloud
    pc = pcl.PointCloud(10)
    pc_arr = np.asarray(pc)
    pc_arr[:] = np.random.randn(*pc_arr.shape)

    # Round-trip the cloud through each supported on-disk form and check
    # that the reloaded copy matches the original. A format of None
    # means "let the loader infer it from the extension".
    cases = [
        (".ply", None),
        (".pcd", None),
        (".las", "PLY"),
        (".las", "PCD"),
    ]
    for suffix, fmt in cases:
        with NamedTemporaryFile(suffix=suffix) as f:
            if fmt is None:
                utils.save(pc, f.name)
                pc2 = utils.load(f.name)
            else:
                utils.save(pc, f.name, format=fmt)
                pc2 = utils.load(f.name, format=fmt)
            _compare(pc, pc2)
Example #3
0
def test_read_write():
    """ Test read and write LAS files functionality"""
    filename = "./testIO.las"

    # make and save a pointcloud
    pc1 = pcl.PointCloud(10)
    pc1_arr = np.asarray(pc1)
    pc1_arr[:] = np.random.randn(*pc1_arr.shape)
    utils.save(pc1, filename)

    try:
        # reload it and verify the round-trip
        pc2 = utils.load(filename)
        _compare(pc1, pc2)
    finally:
        # FIX: remove the scratch file even when load/compare raises,
        # so a failed run does not leave ./testIO.las behind
        os.remove(filename)
def test_read_write():
    ''' Test read and write LAS files functionality'''
    filename = './testIO.las'

    # make and save a pointcloud
    pc1 = pcl.PointCloud(10)
    pc1_arr = np.asarray(pc1)
    pc1_arr[:] = np.random.randn(*pc1_arr.shape)
    utils.save(pc1, filename)

    try:
        # reload it and verify the round-trip
        pc2 = utils.load(filename)
        _compare(pc1, pc2)
    finally:
        # FIX: remove the scratch file even when load/compare raises,
        # so a failed run does not leave ./testIO.las behind
        os.remove(filename)
        # NOTE(review): this snippet is truncated -- the enclosing
        # `try:` and the surrounding function header lie outside the
        # visible chunk, so it is not runnable as-is.
        Initial_scale = float(args['-s'])
    except:
        # bare except: any parse/lookup failure is treated as "no
        # initial scale given"; a narrower (KeyError, ValueError,
        # TypeError) would be safer -- TODO confirm intent
        Initial_scale = None

    assert os.path.exists(sourcefile), sourcefile + ' does not exist'
    assert os.path.exists(drivemapfile), drivemapfile + ' does not exist'
    assert os.path.exists(footprintcsv), footprintcsv + ' does not exist'

    #####
    # Setup * the low-res drivemap
    #       * footprint
    #       * pointcloud
    #       * up-vector

    log("Reading drivemap", drivemapfile)
    drivemap = load(drivemapfile)
    # pin the drivemap to EPSG:32633 (WGS 84 / UTM zone 33N)
    force_srs(drivemap, srs="EPSG:32633")

    log("Reading footprint", footprintcsv)
    footprint = load(footprintcsv)
    # give the footprint the same CRS, then reproject onto the drivemap
    force_srs(footprint, srs="EPSG:32633")
    set_srs(footprint, same_as=drivemap)

    log("Reading object", sourcefile)
    pointcloud = load(sourcefile)

    # optional up-vector, read from a JSON sidecar file when present
    Up = None
    try:
        with open(up_file) as f:
            dic = json.load(f)
        # presumably a 3-vector giving the object's up direction --
        # verify against the producer of this JSON file
        Up = np.array(dic['estimatedUpDirection'])
        # NOTE(review): truncated here -- the matching except clause is
        # outside the visible chunk.
Example #6
0
#!/usr/bin/env python
"""Segment points by colour from a pointcloud file and save all reddish
points to a target pointcloud file. Autodetects ply, pcd and las files.

Usage: redstickdetection.py  [-h] <infile> <outfile>
"""

from docopt import docopt
from patty.segmentation.segRedStick import get_red_mask
from patty.utils import extract_mask, load, save

if __name__ == '__main__':
    # parse command-line arguments from the usage string above
    args = docopt(__doc__)

    # keep only the points the red-stick colour mask flags, then save
    pc = load(args['<infile>'])
    red_pc = extract_mask(pc, get_red_mask(pc))
    save(red_pc, args['<outfile>'])
Example #7
0
import sys

from patty.segmentation import segment_dbscan
from patty.utils import load, save

if __name__ == '__main__':
    # BUG FIX: `docopt` was called below without ever being imported in
    # this snippet, which would raise NameError at runtime. Import it
    # locally; the __main__ guard keeps it off the module-import path.
    # NOTE(review): this snippet also has no module docstring, so
    # `__doc__` is None here -- docopt needs a usage string; confirm
    # against the full original script.
    from docopt import docopt

    args = docopt(__doc__, sys.argv[1:])

    # clustering parameters from the command line
    rgb_weight = float(args['--rgb_weight'])
    eps = float(args['<epsilon>'])
    minpoints = int(args['<minpoints>'])

    # Kludge to get a proper exception for file not found
    # (PCL will report "problem parsing header!").
    with open(args['<file>']) as _:
        pc = load(args['<file>'])
    print("%d points" % len(pc))

    clusters = segment_dbscan(pc, epsilon=eps, minpoints=minpoints,
                              rgb_weight=rgb_weight)

    # every point not assigned to a cluster counts as an outlier
    n_outliers = len(pc)
    for i, cluster in enumerate(clusters):
        print("%d points in cluster %d" % (len(cluster), i))
        filename = '%s/cluster%d.%s' % (args['--output_dir'], i,
                                        args['--format'])
        save(cluster, filename)
        n_outliers -= len(cluster)

    print("%d outliers" % n_outliers)
from __future__ import print_function
from docopt import docopt

import numpy as np
from patty.utils import load, save


def csv_read(path):
    """Read a CSV file into a float ndarray (missing fields become NaN)."""
    values = np.genfromtxt(path, dtype=float, delimiter=',')
    return values


if __name__ == '__main__':
    args = docopt(__doc__)

    # source pointcloud to transform
    pc = load(args['<source>'])

    # optional origin offset for the rotate/scale calls below
    try:
        offset = csv_read(args['-o'])
    except:
        # bare except: a missing/unreadable '-o' simply means no offset;
        # a narrower exception type would be safer -- TODO confirm
        offset = None

    # optional rotation matrix; failures are reported but non-fatal
    try:
        matrix = csv_read(args['-r'])
        pc.rotate(matrix, origin=offset)
    except Exception as e:
        print('Problem with rotate: ', e)

    # optional scale factor
    try:
        factor = csv_read(args['-s'])
        pc.scale(factor, origin=offset)
        # NOTE(review): snippet truncated here -- the matching except
        # clause (and any further steps) lie outside the visible chunk.
Example #9
0
        # NOTE(review): duplicate of an earlier truncated snippet -- the
        # enclosing `try:` and function header lie outside the visible
        # chunk, so this is not runnable as-is.
        Initial_scale = float(args['-s'])
    except:
        # bare except: any parse/lookup failure means "no initial scale
        # given"; a narrower exception type would be safer -- TODO confirm
        Initial_scale = None

    assert os.path.exists(sourcefile), sourcefile + ' does not exist'
    assert os.path.exists(drivemapfile), drivemapfile + ' does not exist'
    assert os.path.exists(footprintcsv), footprintcsv + ' does not exist'

    #####
    # Setup * the low-res drivemap
    #       * footprint
    #       * pointcloud
    #       * up-vector

    log("Reading drivemap", drivemapfile)
    drivemap = load(drivemapfile)
    # pin the drivemap to EPSG:32633 (WGS 84 / UTM zone 33N)
    force_srs(drivemap, srs="EPSG:32633")

    log("Reading footprint", footprintcsv)
    footprint = load(footprintcsv)
    # give the footprint the same CRS, then reproject onto the drivemap
    force_srs(footprint, srs="EPSG:32633")
    set_srs(footprint, same_as=drivemap)

    log("Reading object", sourcefile)
    pointcloud = load(sourcefile)

    # optional up-vector, read from a JSON sidecar file when present
    Up = None
    try:
        with open(up_file) as f:
            dic = json.load(f)
        # presumably a 3-vector giving the object's up direction --
        # verify against the producer of this JSON file
        Up = np.array(dic['estimatedUpDirection'])
        # NOTE(review): truncated here -- the matching except clause is
        # outside the visible chunk.
import sys

from patty.segmentation import segment_dbscan
from patty.utils import load, save

if __name__ == '__main__':
    # NOTE(review): `docopt` is used below but never imported in this
    # snippet, and there is no module docstring for it to parse -- the
    # full original script presumably provides both.
    args = docopt(__doc__, sys.argv[1:])

    # clustering parameters from the command line
    rgb_weight = float(args['--rgb_weight'])
    eps = float(args['<epsilon>'])
    minpoints = int(args['<minpoints>'])

    # Kludge to get a proper exception for file not found
    # (PCL will report "problem parsing header!").
    with open(args['<file>']) as _:
        pc = load(args['<file>'])
    print("%d points" % len(pc))

    clusters = segment_dbscan(pc,
                              epsilon=eps,
                              minpoints=minpoints,
                              rgb_weight=rgb_weight)

    # every point not assigned to a cluster counts as an outlier
    n_outliers = len(pc)
    for i, cluster in enumerate(clusters):
        print("%d points in cluster %d" % (len(cluster), i))
        filename = '%s/cluster%d.%s' % (args['--output_dir'], i,
                                        args['--format'])
        save(cluster, filename)
        n_outliers -= len(cluster)
        # NOTE(review): snippet appears truncated -- an earlier copy of
        # this script also prints the outlier count after this loop.