def test_zero_points_nan_outcome(self):
    """Zero points should return NaN."""
    point_cloud = create_point_cloud(np.zeros(0), np.zeros(0), np.zeros(0))
    targets = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    result = BandRatioFeatureExtractor(2, 4).extract(point_cloud, [[]], targets, 0, Cell(4))
    self.assertTrue(np.isnan(result[0]))

def test_cell_grid_origin(self):
    _, points = create_points_in_xy_grid(lambda x, y: np.random.rand())
    point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    targets = create_point_cloud(np.array([0]), np.array([0]), np.array([0]))  # Origin (corner) of grid
    neighborhoods = list(compute_neighborhoods(point_cloud, targets, Cell(1.99)))
    assert_equal(len(neighborhoods[0]), 1)

def test_cube_grid(self):
    _, points = create_points_in_xy_grid(lambda x, y: 10 * (x % 2))
    point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    targets = create_point_cloud(np.array([4.5]), np.array([4.5]), np.array([0]))  # Center of grid
    neighborhoods = list(compute_neighborhoods(point_cloud, targets, Cube(2)))
    assert_equal(len(neighborhoods[0]), 2)

def assert_std_for_z_function_in_xy_grid(z_checkered, expected):
    """Assert that the standard deviation of the z values in a unit-spaced x, y grid equals the expected value."""
    n_points, points = create_points_in_xy_grid(z_checkered)
    point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    targets = create_point_cloud([0], [0], [0])
    compute_features(point_cloud, [range(n_points)], 0, targets, ['sigma_z'],
                     InfiniteCylinder(10))
    np.testing.assert_almost_equal(targets[keys.point]['sigma_z']['data'][0], expected)

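# Example usage of the helper above (a sketch, not from the original suite): assuming the x, y
# grid produced by create_points_in_xy_grid has an even number of columns along x, a z pattern
# that alternates between 0 and 10 puts half of the points at each height, so sigma_z is 5.
def test_sigma_z_of_checkered_pattern():
    assert_std_for_z_function_in_xy_grid(lambda x, y: 10 * (x % 2), 5)
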
def test_cell_grid(self):
    _, points = create_points_in_xy_grid(lambda x, y: np.random.rand())
    point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    targets = create_point_cloud(np.array([4.5]), np.array([4.5]), np.array([4.5]))  # Center of grid
    neighborhoods = compute_neighborhoods(point_cloud, targets, Cell(2))
    neighborhood = next(neighborhoods)
    assert_equal(len(neighborhood[0]), 4)

def test_no_lower_bound_correct_outcome(self):
    """Without a lower bound, all points below the upper bound should be counted."""
    n = 10
    point_cloud = create_point_cloud(np.zeros(n), np.zeros(n),
                                     np.hstack([np.zeros(8), np.ones(2)]))
    targets = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    result = BandRatioFeatureExtractor(None, 0.5).extract(
        point_cloud, [range(n)], targets, 0, Cell(4))
    np.testing.assert_equal(result[0], 0.8)

def test_cell_grid_larger_sample_size(self):
    _, points = create_points_in_xy_grid(lambda x, y: np.random.rand())
    point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    targets = create_point_cloud(np.array([4.5]), np.array([4.5]), np.array([4.5]))  # Center of grid
    neighborhoods = compute_neighborhoods(
        point_cloud, targets, Cell(5), sample_size=10000)  # Results in 36 neighbors
    _ = next(neighborhoods)

def test_cell(self):
    n_included = 123
    n_excluded = 456
    x = np.append(np.zeros(n_included), np.ones(n_excluded))
    environment = create_point_cloud(x, x, x)
    target = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    cube = Cube(1)  # volume = 1.0
    neighborhoods = compute_neighborhoods(environment, target, cube)
    extractor = PointDensityFeatureExtractor()
    densities = extractor.extract(environment, neighborhoods, target, [0], cube)
    np.testing.assert_allclose(densities, n_included)

def test_cell(self):
    n_included = 123
    n_excluded = 456
    x = np.append(np.zeros(n_included), np.ones(n_excluded))
    environment = create_point_cloud(x, x, x)
    target = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    cube = Cube(1)  # volume = 1.0
    neighbors_index = compute_neighborhoods(environment, target, cube)
    extractor = PointDensityFeatureExtractor()
    for index in neighbors_index:
        d = extractor.extract(environment, index, target, [0], cube)
        self.assertEqual(d, n_included)

def test_target_number_matches_neighborhood_number(self):
    _, points = create_points_in_xy_grid(lambda x, y: 10 * (x % 2))
    environment_point_cloud = create_point_cloud(points[:, 0], points[:, 1], points[:, 2])
    assert_target_number_matches_neighborhood_number(environment_point_cloud)

def test_cell_no_points(self):
    point_cloud = create_emtpy_point_cloud()
    targets = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    neighborhoods = compute_neighborhoods(point_cloud, targets, Cell(1))
    neighborhood = next(neighborhoods)
    assert_equal(len(neighborhood[0]), 0)

def _create_targets_and_extract_features(feature_names, n):
    x = np.ones(n)
    y = np.ones(n)
    z = np.ones(n)
    target = test_tools.create_point_cloud(x, y, z)
    _compute_features(target, feature_names)
    return target

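# Example usage of the helper above (a sketch; 'test1_a' is one of the test feature names used
# elsewhere in this suite, and computed features are assumed to end up as attributes of the
# target point cloud).
def test_extracted_feature_is_added_to_target():
    target = _create_targets_and_extract_features(['test1_a'], 10)
    assert 'test1_a' in target["vertex"]
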
def assert_target_number_matches_neighborhood_number(environment_point_cloud):
    n_targets = 99
    targets = create_point_cloud(np.array(range(n_targets)),
                                 np.array(range(n_targets)),
                                 np.array(range(n_targets)))
    neighborhoods = list(compute_neighborhoods(environment_point_cloud, targets, Cube(2)))
    assert_equal(len(neighborhoods), n_targets)

def _get_random_targets(self):
    """Get a random target point cloud."""
    num_all_pc_points = len(self.point_cloud[keys.point]["x"]["data"])
    # random.randint is inclusive on both ends, so subtract 1 to stay within bounds.
    rand_indices = [random.randint(0, num_all_pc_points - 1) for _ in range(20)]
    x, y, z = utils.get_point(self.point_cloud, rand_indices)
    return create_point_cloud(x, y, z)

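# Example usage of the helper above (a sketch; it assumes compute_neighborhoods and Sphere are
# available here and that, as in the other neighborhood tests, compute_neighborhoods yields one
# neighborhood per target point).
def test_neighborhoods_for_random_targets(self):
    targets = self._get_random_targets()
    neighborhoods = list(compute_neighborhoods(self.point_cloud, targets, Sphere(5)))
    self.assertEqual(len(neighborhoods), 20)
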
def test_normalize_tiny_unequal_point_cloud(self):
    point_cloud = create_point_cloud([0, 0, 0], [0, 0, 0], [1, 2, 3])
    normalized_point_cloud = normalize(point_cloud)
    normalized_values = get_attribute_value(normalized_point_cloud, range(3), normalized_height)
    np.testing.assert_allclose(normalized_values, np.array([0, 1, 2]), atol=1e-7)

def test_percentile_z(self):
    xyz = np.array([list(p) for p in itertools.product(np.linspace(0, 1, 11), repeat=3)])
    point_cloud = create_point_cloud(xyz[:, 0], xyz[:, 1], xyz[:, 2])
    expected = np.linspace(0.1, 1.0, 10)
    extractors = [PercentileFeatureExtractor(p) for p in range(10, 110, 10)]
    percentiles = [e.extract(point_cloud, [range(len(xyz))], None, None, None)[0]
                   for e in extractors]
    np.testing.assert_allclose(percentiles, expected)

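# Sanity check for the expected values above (a sketch; it assumes PercentileFeatureExtractor
# agrees with numpy's default linear-interpolation percentiles): each of the 11 z levels
# 0.0, 0.1, ..., 1.0 occurs 121 times in the 11x11x11 grid, so the 10th through 100th
# percentiles of z are 0.1, 0.2, ..., 1.0.
def test_percentile_z_expected_values_match_numpy(self):
    z = np.array([p[2] for p in itertools.product(np.linspace(0, 1, 11), repeat=3)])
    np.testing.assert_allclose(np.percentile(z, list(range(10, 110, 10))),
                               np.linspace(0.1, 1.0, 10))
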
def test_percentile_norm_z(self):
    xyz = np.array([list(p) for p in itertools.product(np.linspace(0, 1, 11), repeat=3)])
    point_cloud = create_point_cloud(xyz[:, 0], xyz[:, 1], np.zeros_like(xyz[:, 2]),
                                     normalized_z=xyz[:, 2])
    expected = np.linspace(0.1, 1.0, 10)
    extractors = [PercentileFeatureExtractor(p, data_key=keys.normalized_height)
                  for p in range(10, 110, 10)]
    percentiles = np.hstack([e.extract(point_cloud, [range(len(xyz))], None, None, None)[0]
                             for e in extractors])
    np.testing.assert_allclose(percentiles, expected)

def test_normalize_tiny_unequal_point_cloud_multiple_cells(self):
    """Last of the 3 points is not in the neighborhood of the others."""
    point_cloud = create_point_cloud([0, 0, 5], [0, 0, 0], [1, 2, 3])
    normalized_point_cloud = normalize(point_cloud, cell_size=2)
    normalized_values = get_attribute_value(normalized_point_cloud, range(3), normalized_height)
    np.testing.assert_allclose(normalized_values, np.array([0, 1, 0]), atol=1e-7)

def test_use_norm_z(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 5])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    kurtosis = self.extractor.extract(point_cloud, neighborhood, None, None, None)[0]
    self.assertAlmostEqual(kurtosis, -1.5)

def test_use_norm_z(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 6])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    median = self.extractor.extract(point_cloud, neighborhood, None, None, None)
    np.testing.assert_almost_equal(median, 4)

def test_vectorized_chunks():
    """Should not throw an error for non-requested but provided features."""
    feature_map._get_default_extractors = _get_test_extractors
    n = 2000000  # Enough to be too big for a single chunk
    x = np.zeros(n)
    y = np.zeros(n)
    z = np.zeros(n)
    target = test_tools.create_point_cloud(x, y, z)
    feature_names = ['vectorized1']
    _compute_features(target, feature_names)

def test_use_norm_z(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 6])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    skew = self.extractor.extract(point_cloud, neighborhood, None, None, None)
    self.assertGreater(skew, 0.1)

def test_use_optional_data_key(self):
    """Should use data under the given data key (normalized z)."""
    n = 10
    zeros = np.zeros(n)
    point_cloud = create_point_cloud(zeros, zeros, zeros,
                                     normalized_z=np.hstack([np.zeros(8), np.ones(2)]))
    targets = create_point_cloud(np.zeros(1), np.zeros(1), np.zeros(1))
    extractor = BandRatioFeatureExtractor(None, 0.5, data_key=keys.normalized_height)
    result = extractor.extract(point_cloud, [range(n)], targets, 0, Cell(4))
    np.testing.assert_equal(result[0], 0.8)

def test_use_norm_z(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 5])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhoods = [[0, 1, 2]]
    entropy = self.extractor.extract(point_cloud, neighborhoods, None, None, None)[0]
    self.assertNotAlmostEqual(entropy, 0)

def test_height_stats(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 5])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    variance = self.extractor.extract(point_cloud, [neighborhood], None, None, None)[0]
    self.assertAlmostEqual(variance, 2 / 3)

def test_with_neighborhood_generator():
    """Should run for all extractors without error, which means the neighborhood generator is
    iterated only once.

    Actual feature extractors are used here because the test feature extractors don't use
    neighborhoods.
    """
    n = 200
    feature_names = ['vectorized1', 'test1_a', 'median_z', 'mean_z']
    x = np.ones(n)
    y = np.ones(n)
    z = np.ones(n)
    target = test_tools.create_point_cloud(x, y, z)
    neighborhoods = ([] for _ in range(len(target["vertex"]["x"]["data"])))
    feature_extraction.compute_features({}, neighborhoods, target, feature_names, Sphere(5))

def test_eigenvalues_of_too_few_points_results_in_0():
    """If there are too few points to calculate the eigenvalues, don't output NaN or inf."""
    a = np.array([5])
    pc = create_point_cloud(a, a, a)
    compute_features(pc, [[0]], pc, ["eigenv_1", "eigenv_2", "eigenv_3"], InfiniteCylinder(5))
    eigen_val_123 = np.array([pc[keys.point]['eigenv_{}'.format(i)]['data'] for i in [1, 2, 3]])
    assert not np.any(np.isnan(eigen_val_123))
    assert not np.any(np.isinf(eigen_val_123))

def assert_expected_ratios(expected_ratios, n_below_limits, n_withins, n_above_limits,
                           volume=Cell(4)):
    n_ratios = len(expected_ratios)
    neighborhoods, point_cloud = generate_test_point_cloud_and_neighborhoods(
        n_below_limits, n_withins, n_above_limits, n_ratios)
    targets = create_point_cloud(np.zeros(n_ratios), np.zeros(n_ratios), np.zeros(n_ratios))
    extractor = BandRatioFeatureExtractor(LOWER_LIMIT, UPPER_LIMIT)
    result = extractor.extract(point_cloud, neighborhoods, targets, 0, volume)
    np.testing.assert_allclose(result, expected_ratios)

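# Example usage of the helper above (a sketch, assuming generate_test_point_cloud_and_neighborhoods
# builds, for each ratio, one neighborhood with the requested numbers of points below, within and
# above the [LOWER_LIMIT, UPPER_LIMIT] band): 2 of 4 points within the band gives a ratio of 0.5.
def test_ratio_half_of_points_within_limits(self):
    assert_expected_ratios(np.array([0.5]), [1], [2], [1])
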
def test_use_norm(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 5])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    extractor = RangeFeatureExtractor(data_key=keys.normalized_height)
    _max, _min, _range = extractor.extract(point_cloud, neighborhood, None, None, None)
    self.assertAlmostEqual(_max, 5)
    self.assertAlmostEqual(_min, 3)
    self.assertAlmostEqual(_range, 2)

def test_height_stats(self):
    x = y = np.array([0, 0, 0])
    z = np.array([2, 2, 2])
    normalized_z = np.array([3, 4, 5])
    point_cloud = create_point_cloud(x, y, z, normalized_z=normalized_z)
    neighborhood = [[0, 1, 2]]
    mean, std, coeff = self.extractor.extract(point_cloud, neighborhood, None, None, None)
    self.assertAlmostEqual(mean, 4)
    self.assertAlmostEqual(std, np.sqrt(2 / 3))
    self.assertAlmostEqual(coeff, np.sqrt(2 / 3) / 4)