Example #1
def load_file(pc3_path):
    logging.info("Loading '%s'", pc3_path)

    # The classification file is expected next to the point file, with the
    # trailing 'PC3.txt' (7 characters) replaced by 'CLS.txt'.
    cls_path = pc3_path[:-7] + 'CLS.txt'

    pset = PointSet(pc3_path, cls_path)

    # Split into sub-tiles and extract the per-tile data.
    psets = pset.split()

    return [extract_data(ps) for ps in psets]
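
A minimal usage sketch, assuming the naming convention above; the file name below is hypothetical:

# Hypothetical input; 'area1_PC3.txt' pairs with 'area1_CLS.txt'.
tiles = load_file('/data/tiles/area1_PC3.txt')
print('Extracted data for {} sub-tiles'.format(len(tiles)))
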
Example #2
def pre_processor(files, input_queue):
    for file in files:

        logging.info('Loading {}'.format(file))
        pset = PointSet(file)
        psets = pset.split()
        num_batches = int(math.ceil((1.0 * len(psets)) / BATCH_SIZE))

        data = []
        for batch_idx in range(num_batches):
            start_idx = batch_idx * BATCH_SIZE
            end_idx = (batch_idx + 1) * BATCH_SIZE

            # Each batch is emitted FLAGS.n_angles times: once as-is (k == 0)
            # and then rotated about the z axis in evenly spaced increments.
            for k in range(FLAGS.n_angles):
                batch_raw, batch_data = get_batch(psets, start_idx, end_idx)

                if k == 0:
                    aug_data = batch_data
                else:
                    ang = (1.0 * k) / (1.0 * FLAGS.n_angles) * 2 * np.pi
                    if FLAGS.extra_dims:
                        # Rotate only the xyz columns and keep any extra
                        # feature columns unchanged.
                        aug_data = np.concatenate(
                            (provider.rotate_point_cloud_z(
                                batch_data[:, :, 0:3],
                                angle=ang), batch_data[:, :, 3:]),
                            axis=2)
                    else:
                        aug_data = provider.rotate_point_cloud_z(batch_data,
                                                                 angle=ang)

                data.append((batch_raw, aug_data))

        logging.debug('Adding {} to queue'.format(file))
        input_queue.put((pset, data))
        logging.debug('Added {} to queue'.format(file))
    logging.info('Pre-processing finished')
    # The None sentinel tells the consumer that no more files are coming.
    input_queue.put(None)
    logging.debug('Pre-processing thread finished')
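
For reference, a minimal sketch of a fixed-angle z-axis rotation of a B x N x 3 batch; provider.rotate_point_cloud_z is assumed to do something equivalent when given angle=..., but this is not the project's implementation:

import numpy as np

def rotate_batch_z(batch_xyz, angle):
    # batch_xyz: (B, N, 3) array of xyz coordinates; angle in radians.
    c, s = np.cos(angle), np.sin(angle)
    # Rotation about the z axis; z coordinates are unchanged.
    rot = np.array([[c, -s, 0.0],
                    [s, c, 0.0],
                    [0.0, 0.0, 1.0]])
    return batch_xyz @ rot.T
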
Example #3
def post_processor(output_queue):
    while True:
        out_data = output_queue.get()
        if out_data is None:
            break

        pset, all_points, all_labels = out_data

        logging.info('Post-processing {}'.format(pset.filename))
        with tempfile.TemporaryDirectory() as tmpdir:
            # Save pset to temp file
            ipath = os.path.join(tmpdir, pset.filename + '_original.las')
            pset.save(ipath)

            # Update pset points
            pset.x = all_points[:, 0]
            pset.y = all_points[:, 1]
            pset.z = all_points[:, 2]
            pset.i = all_points[:, 3]
            pset.r = all_points[:, 4]
            pset.c = np.array([LABEL_MAP[v] for v in all_labels],
                              dtype='uint8')

            # Save all classed points to a new file
            cpath = os.path.join(tmpdir, pset.filename + '_candidates.las')
            pset.save(cpath)

            if FLAGS.output_type is OutputType.LABELS:
                # Only the classification text file is kept, so write the
                # intermediate LAS into the temporary directory.
                opath = os.path.join(tmpdir, pset.filename + '.las')
            else:
                opath = os.path.join(FLAGS.output_path, pset.filename + '.las')

            # Run a nearest-neighbour voting step (PDAL pipeline) to classify
            # the original points from the classified candidates. The number
            # of votes is FLAGS.n_angles * 4 + 1, where 4 is the nominal
            # number of overlapping subtiles per point before rotations
            # (from splitting the point cloud).
            pipeline = {
                'pipeline': [
                    ipath,
                    {
                        'type': 'filters.neighborclassifier',
                        'k': FLAGS.n_angles * 4 + 1,
                        'candidate': cpath
                    },
                    opath
                ]
            }
            p = subprocess.run([
                '/home/xaserver/anaconda3/envs/cpdal-run/bin/pdal', 'pipeline',
                '-s'
            ],
                               input=json.dumps(pipeline).encode())
            if p.returncode:
                raise ValueError('Failed to run pipeline: \n"' +
                                 json.dumps(pipeline) + '"')

            if FLAGS.output_type is not OutputType.LAS:
                # Load in updated point cloud, save classification file
                pset2 = PointSet(opath)
                pset2.save_classifications_txt(
                    os.path.join(FLAGS.output_path,
                                 pset.filename + '_CLS.txt'))
            output_queue.task_done()
            logging.debug('Finished {}'.format(pset.filename))
    logging.debug('Post-processing thread finished')
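
The two workers above form a producer/consumer pair. A minimal wiring sketch, not the project's actual driver; the inference call in the middle (run_inference) and the files list are hypothetical:

import queue
import threading

input_queue = queue.Queue(maxsize=4)
output_queue = queue.Ueue() if False else queue.Queue()

producer = threading.Thread(target=pre_processor, args=(files, input_queue))
consumer = threading.Thread(target=post_processor, args=(output_queue,))
producer.start()
consumer.start()

while True:
    item = input_queue.get()
    if item is None:
        break
    pset, data = item
    all_points, all_labels = run_inference(data)  # hypothetical model call
    output_queue.put((pset, all_points, all_labels))

producer.join()
output_queue.put(None)  # sentinel understood by post_processor
consumer.join()
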
Example #4
from gui import GUI
from pointset import PointSet
from algorithm import visibility_polygon

pointset = PointSet(500, 500, visibility_polygon)
gui = GUI(pointset)

gui.mainloop()
Example #5
from gui import GUI
from pointset import PointSet
from alg1 import convex_hull as algorithm1
from alg5 import convex_hull as algorithm5


pointset = PointSet(500, 500, algorithm5)
gui = GUI(pointset)

gui.mainloop()
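
For illustration only, a minimal convex-hull sketch (Andrew's monotone chain) of the kind of callable these scripts pass to PointSet; it is not the repository's alg1/alg5 implementation and assumes points are (x, y) tuples:

def convex_hull(points):
    # Returns the hull vertices in counter-clockwise order.
    pts = sorted(set(points))
    if len(pts) <= 2:
        return pts

    def cross(o, a, b):
        return (a[0] - o[0]) * (b[1] - o[1]) - (a[1] - o[1]) * (b[0] - o[0])

    lower, upper = [], []
    for p in pts:
        while len(lower) >= 2 and cross(lower[-2], lower[-1], p) <= 0:
            lower.pop()
        lower.append(p)
    for p in reversed(pts):
        while len(upper) >= 2 and cross(upper[-2], upper[-1], p) <= 0:
            upper.pop()
        upper.append(p)
    # Drop the last point of each half; it repeats the other half's start.
    return lower[:-1] + upper[:-1]
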
Example #6
from gui import GUI
from pointset import PointSet
from brute_circle_algorithm import enclosing_circle as brute_circle_algorithm
from minidisc_algorithm import minidisc as minidisc_algorithm

pointset = PointSet(500, 500, minidisc_algorithm)
gui = GUI(pointset)

gui.mainloop()
Example #7
from gui import GUI
from pointset import PointSet

pointset = PointSet(500, 500)
gui = GUI(pointset)

gui.mainloop()
Example #8
from gui import GUI
from pointset import PointSet
from smallest_rectangle import smallest_rectangle

pointset = PointSet(500, 500, smallest_rectangle)
gui = GUI(pointset)

gui.mainloop()