Example #1
def select_polygon(point_cloud, polygon_string, read_from_file=False, return_mask=False):
    """
    Return the selection of the input point cloud that contains only points within a given polygon.

    :param point_cloud: Input point cloud
    :param polygon_string: Polygon, either defined in a WKT string or in a file (WKT and ESRI formats supported)
    :param read_from_file: if true, polygon is expected to be the name of the file where the polygon is defined
    :param return_mask: if true, return a mask of selected points, rather than point cloud
    :return: point cloud with the points inside the polygon, or the boolean selection mask if return_mask is true
    """
    if point_cloud is None:
        raise ValueError('Input point cloud cannot be None.')
    if not isinstance(polygon_string, str):
        raise ValueError('Polygon (or its filename) should be a string')
    if read_from_file:
        format = os.path.splitext(polygon_string)[1].lower()
        reader = _get_polygon_reader(format)
        polygon = reader(polygon_string)
    else:
        polygon = _load_polygon(polygon_string)
    
    if isinstance(polygon, shapely.geometry.polygon.Polygon) and polygon.is_valid:
        points_in = _contains(point_cloud, polygon)
    elif isinstance(polygon, shapely.geometry.multipolygon.MultiPolygon) and polygon.is_valid:
        points_in = []
        # Iterate over .geoms so this also works with shapely >= 2.0, where iterating
        # a MultiPolygon directly is no longer supported.
        polygons = list(polygon.geoms)
        for count, poly in enumerate(polygons, start=1):
            if count % 200 == 0 or count == len(polygons):
                print('Checking polygon {}/{}...'.format(count, len(polygons)))
            points_in.extend(_contains(point_cloud, poly))
        print('{} points found in {} polygons.'.format(len(points_in), len(polygons)))
    else:
        raise ValueError('Given geometry is not a valid Polygon or MultiPolygon.')
    
    if return_mask: 
        mask = np.zeros(len(point_cloud['vertex']['x']['data']), dtype=bool)
        mask[points_in] = True
        return mask
    else:
        point_cloud_filtered = copy_point_cloud(point_cloud, points_in)
        add_metadata(point_cloud_filtered, sys.modules[__name__],
                     {'polygon_string': polygon_string,
                      'read_from_file': read_from_file})
        return point_cloud_filtered
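
A minimal usage sketch for select_polygon. The import paths (laserchicken / laserchicken.filter) follow the package these snippets appear to come from, and the input file name and WKT square are purely illustrative.

from laserchicken import load
from laserchicken.filter import select_polygon

point_cloud = load('points.ply')  # hypothetical input file

# Keep only the points inside a 10 x 10 square given as a WKT string.
square_wkt = 'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))'
cropped = select_polygon(point_cloud, square_wkt)

# The same selection as a boolean mask over the input points.
mask = select_polygon(point_cloud, square_wkt, return_mask=True)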
Example #2
def load(path, format=None, *args, **kwargs):
    """
    Read point cloud from a file.

    :param path: path of the file to read
    :param format: file format; if None, it is derived from the path
    :param args: optional non-keyword arguments to be passed to the format-specific reader
    :param kwargs: optional keyword arguments to be passed to the format-specific reader
    :return: point cloud data
    """
    reader = get_io_handler(path, mode='r', format=format)
    point_cloud = reader.read(*args, **kwargs)
    add_metadata(point_cloud, sys.modules[__name__], {
        'path': path,
        'args': args,
        **kwargs
    })
    return point_cloud
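
A short usage sketch for load. The top-level import and the file name are assumptions; the 'vertex'/'x' attribute layout follows the other snippets on this page.

from laserchicken import load

point_cloud = load('points.ply')        # format derived from the file extension
x = point_cloud['vertex']['x']['data']  # per-point attributes are numpy arrays
print('loaded {} points'.format(len(x)))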
Example #3
def normalize(point_cloud, cell_size=None):
    """Add a normalized height attribute: z minus the minimum z, global or per grid cell of size cell_size."""
    z = point_cloud[keys.point]['z']['data']
    point_cloud[keys.point][normalized_height] = {"type": 'float64', "data": np.array(z)}
    if cell_size is None:
        n_points = point_cloud[keys.point][normalized_height]['data'].size
        min_z = _calculate_min_z(range(n_points), point_cloud)
        point_cloud[keys.point][normalized_height]['data'] = z - min_z
    else:
        targets = _create_spanning_grid(point_cloud, cell_size)

        neighborhoods = compute_neighborhoods(point_cloud, targets, Cell(cell_size), sample_size=None)
        for neighborhood in neighborhoods:
            min_z = _calculate_min_z(neighborhood, point_cloud)
            point_cloud[keys.point][normalized_height]['data'][neighborhood] = z[neighborhood] - min_z
    import sys
    module = sys.modules[__name__]
    add_metadata(point_cloud, module, {'cell_size': cell_size})
    return point_cloud
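
A usage sketch for normalize, assuming the function is importable from laserchicken.normalize and that the new attribute is stored under the name 'normalized_height'; the input file is hypothetical.

from laserchicken import load
from laserchicken.normalize import normalize

point_cloud = load('points.ply')         # hypothetical input file
normalize(point_cloud)                   # normalized_height = z - global minimum z
# normalize(point_cloud, cell_size=2.5)  # or: z minus the minimum z per 2.5 m grid cell
heights = point_cloud['vertex']['normalized_height']['data']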
Example #4
def select_below(point_cloud, attribute, threshold, return_mask=False):
    """
    Return the selection of the input point cloud that contains only points with a given attribute below some value.

    :param point_cloud: Input point cloud
    :param attribute: The attribute name used for selection
    :param threshold: The threshold value used for selection
    :param return_mask: If true, return the mask corresponding to the selection
    :return: point cloud with the selected points, or the boolean selection mask if return_mask is true
    """
    _check_valid_arguments(attribute, point_cloud)
    mask = point_cloud[point][attribute]['data'] < threshold
    if return_mask:
        return mask
    point_cloud_filtered = copy_point_cloud(point_cloud, mask)
    add_metadata(point_cloud_filtered, sys.modules[__name__],
                 {'attribute': attribute, 'threshold': threshold})
    return point_cloud_filtered
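
A usage sketch for select_below, e.g. keeping only points within 1.5 m of the local ground after normalization. The attribute name, import paths and file name are assumptions based on the other snippets.

from laserchicken import load
from laserchicken.filter import select_below

point_cloud = load('points.ply')
low_points = select_below(point_cloud, 'normalized_height', 1.5)
low_mask = select_below(point_cloud, 'normalized_height', 1.5, return_mask=True)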
Example #5
def compute_features(env_point_cloud, neighborhoods, target_point_cloud, feature_names, volume, verbose=True, **kwargs):
    """
    Compute features for each target and store result as point attributes in target point cloud.

    Example:
    >>> point_cloud = load('data1.ply')
    >>> target_point_cloud = load('data2.ply')
    >>> volume = build_volume('infinite cylinder', radius=4)
    >>> neighbors = compute_neighborhoods(point_cloud, target_point_cloud, volume)
    >>> neighborhoods = []
    >>> for x in neighbors:
    ...     neighborhoods += x
    >>> compute_features(point_cloud, neighborhoods, target_point_cloud, ['eigenv_1', 'kurto_z'], volume)
    >>> eigenv_1 = target_point_cloud[point]['eigenv_1']['data']

    Results of the example above are stored in the target point cloud as extra point attributes.

    :param env_point_cloud: environment point cloud
    :param neighborhoods: list of neighborhoods which are themselves lists of indices referring to the environment
    :param target_point_cloud: point cloud of targets
    :param feature_names: list of features that are to be calculated
    :param volume: object describing the volume that contains the neighborhood points
    :param kwargs: keyword arguments for the individual feature extractors
    :param verbose: if true, output extra information
    :return: None, results are stored in attributes of the target point cloud
    """
    _verify_feature_names(feature_names)
    wanted_feature_names = feature_names + [existing_feature for existing_feature in target_point_cloud[point]]
    extended_features = _make_extended_feature_list(feature_names)

    for feature_name in extended_features:
        target_point_cloud[point][feature_name] = {"type": 'float64',
                                                   "data": np.zeros_like(target_point_cloud[point]['x']['data'],
                                                                         dtype=np.float64)}

    if provenance in env_point_cloud:
        utils.add_metadata(target_point_cloud, sys.modules[__name__],
                           {'env_point_cloud': {provenance: copy.copy(env_point_cloud[provenance])}})

    _add_features(extended_features, env_point_cloud, neighborhoods, target_point_cloud, volume, verbose, kwargs)

    _keep_only_wanted_features(target_point_cloud, wanted_feature_names)
Example #6
def normalize(point_cloud, cell_size=None):
    z = point_cloud[keys.point]['z']['data']
    point_cloud[keys.point][normalized_height] = {"type": 'float64', "data": np.array(z)}
    if cell_size is None:
        n_points = point_cloud[keys.point][normalized_height]['data'].size
        _, min_z, _ = range_extractor().extract(point_cloud, range(n_points), None, None, None)
        point_cloud[keys.point][normalized_height]['data'] = z - min_z
    else:
        targets = create_spanning_grid(point_cloud, cell_size)

        neighborhood_sets = compute_neighborhoods(point_cloud, targets, Cell(cell_size), sample_size=None)

        for neighborhood_set in neighborhood_sets:
            for neighborhood in neighborhood_set:
                _, min_z, _ = range_extractor().extract(point_cloud, neighborhood, None, None, None)
                point_cloud[keys.point][normalized_height]['data'][neighborhood] = z[neighborhood] - min_z
    import sys
    module = sys.modules[__name__]
    add_metadata(point_cloud, module, {'cell_size': cell_size})
    return point_cloud
Example #7
def select_equal(point_cloud, attribute, value, return_mask=False):
    """
    Return the selection of the input point cloud that contains only points with a given attribute equal to some value.
    If a list of values is given, select the points corresponding to any of the provided values.

    :param point_cloud: Input point cloud.
    :param attribute: The attribute name used for selection
    :param value: The value(s) to compare the attribute to
    :param return_mask: If true, return the mask corresponding to the selection
    :return: point cloud with the selected points, or the boolean selection mask if return_mask is true
    """
    _check_valid_arguments(attribute, point_cloud)
    # broadcast using shape of the values
    mask = point_cloud[point][attribute]['data'] == np.array(value)[..., None]
    if mask.ndim > 1:
        mask = np.any(mask, axis=0)  # reduce
    if return_mask:
        return mask
    point_cloud_filtered = copy_point_cloud(point_cloud, mask)
    add_metadata(point_cloud_filtered, sys.modules[__name__],
                 {'attribute': attribute, 'value': value})
    return point_cloud_filtered
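
A usage sketch for select_equal with a single value and with a list of values (the list is broadcast against the attribute and reduced with np.any, as in the code above). The 'raw_classification' attribute name, import paths and file name are assumptions; class codes 2 (ground) and 3-5 (vegetation) follow the ASPRS LAS convention.

from laserchicken import load
from laserchicken.filter import select_equal

point_cloud = load('points.las')
ground = select_equal(point_cloud, 'raw_classification', 2)
vegetation = select_equal(point_cloud, 'raw_classification', [3, 4, 5])  # any of the listed classes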
Example #8
def _compute_features_for_chunk(features_to_do, env_point_cloud, current_neighborhoods, target_point_cloud,
                                target_indices, volume,
                                verbose, kwargs):
    """Compute each requested feature for one chunk of targets, removing it from features_to_do once provided."""
    while features_to_do:
        feature_name = features_to_do[0]
        extractor = FEATURES[feature_name]

        if verbose:
            sys.stdout.write('Extracting feature(s) "{}"'.format(extractor.provides()))
            start = time.time()

        for key_word in kwargs:
            setattr(extractor, key_word, kwargs[key_word])
        _add_features_from_single_extractor(extractor, env_point_cloud, current_neighborhoods, target_point_cloud,
                                            target_indices, volume)
        utils.add_metadata(target_point_cloud, type(extractor).__module__, extractor.get_params())

        if verbose:
            elapsed = time.time() - start
            sys.stdout.write('Extracting feature(s) "{}" took {:.2f} seconds\n'.format(extractor.provides(), elapsed))

        for provided_feature in extractor.provides():
            if provided_feature in features_to_do:
                features_to_do.remove(provided_feature)
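
The loop above forwards keyword arguments to an extractor simply by setting them as attributes before extraction. A minimal, self-contained sketch of that pattern with a hypothetical DummyExtractor (not part of the library):

class DummyExtractor:
    radius = 1.0  # default parameter value

    def provides(self):
        return ['dummy_feature']

extractor = DummyExtractor()
kwargs = {'radius': 5.0}
for key_word in kwargs:
    setattr(extractor, key_word, kwargs[key_word])  # same forwarding as in _compute_features_for_chunk
assert extractor.radius == 5.0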
Example #9
def compute_features(env_point_cloud,
                     neighborhoods,
                     target_idx_base,
                     target_point_cloud,
                     feature_names,
                     volume,
                     overwrite=False,
                     verbose=True,
                     **kwargs):
    """
    Compute features for each target and store result as point attributes in target point cloud.

    Example:
    >>> point_cloud = read_ply.read('data1.ply')
    >>> target_point_cloud = read_ply.read('data2.ply')
    >>> volume = volume_specification.InfiniteCylinder(4)
    >>> neighbors = compute_neighborhoods(point_cloud, target_point_cloud, volume)
    >>> neighborhoods = []
    >>> for x in neighbors:
    ...     neighborhoods += x
    >>> compute_features(point_cloud, neighborhoods, 0, target_point_cloud, ['eigenv_1', 'kurto_z'], volume)
    >>> eigenv_1 = target_point_cloud[point]['eigenv_1']['data']

    Results of the example above are stored in the target point cloud as extra point attributes.

    :param env_point_cloud: environment point cloud
    :param neighborhoods: list of neighborhoods which are themselves lists of indices referring to the environment
    :param target_idx_base: index offset of these targets within the full target point cloud (0 when all targets are processed at once)
    :param target_point_cloud: point cloud of targets
    :param feature_names: list of features that are to be calculated
    :param volume: object describing the volume that contains the neighborhood points
    :param overwrite: if true, even features that are already in the targets point cloud will be calculated and stored
    :param kwargs: keyword arguments for the individual feature extractors
    :param verbose: if true, output extra information
    :return: None, results are stored in attributes of the target point cloud
    """
    _verify_feature_names(feature_names)
    wanted_feature_names = feature_names + [
        existing_feature for existing_feature in target_point_cloud[keys.point]
    ]
    extended_features = _make_extended_feature_list(feature_names)
    features_to_do = extended_features

    while features_to_do:
        feature_name = features_to_do[0]

        if (target_idx_base == 0) and (not overwrite) and (
                feature_name in target_point_cloud[keys.point]):
            # Remove the feature before skipping it, otherwise the while loop would never advance.
            features_to_do.remove(feature_name)
            continue  # Skip feature calc if it is already there and we do not overwrite

        extractor = FEATURES[feature_name]

        if verbose:
            sys.stdout.write('Feature(s) "{}"'.format(extractor.provides()))
            sys.stdout.flush()
            start = time.time()

        _add_or_update_feature(env_point_cloud, neighborhoods, target_idx_base,
                               target_point_cloud, extractor, volume,
                               overwrite, kwargs)
        utils.add_metadata(target_point_cloud,
                           type(extractor).__module__, extractor.get_params())

        if verbose:
            elapsed = time.time() - start
            sys.stdout.write(' took {:.2f} seconds\n'.format(elapsed))
            sys.stdout.flush()

        for provided_feature in extractor.provides():
            if provided_feature in features_to_do:
                features_to_do.remove(provided_feature)

    _keep_only_wanted_features(target_point_cloud, wanted_feature_names)
Example #10
def test_AddMetaDataToPointCloud(self):
    """ Test adding info to the point cloud for test module """
    pc = test_tools.generate_tiny_test_point_cloud()
    from laserchicken import select as somemodule
    utils.add_metadata(pc, somemodule, params=(0.5, "cylinder", 4))
    self.assertEqual(len(pc[keys.provenance]), 1)