def _test_class_inf(self, props, exp_class_ids, default_class_id=None):
    geojson = {
        'type': 'FeatureCollection',
        'features': [{
            'properties': props,
            'geometry': {
                'type': 'Point',
                'coordinates': [1, 1]
            }
        }]
    }
    json_to_file(geojson, self.uri)

    class_map = ClassMap.construct_from(['building', 'car', 'tree'])
    class_id_to_filter = {
        1: ['==', 'type', 'building'],
        2: ['any', ['==', 'type', 'car'], ['==', 'type', 'auto']]
    }
    b = GeoJSONVectorSourceConfigBuilder() \
        .with_class_inference(class_id_to_filter=class_id_to_filter,
                              default_class_id=default_class_id) \
        .with_uri(self.uri) \
        .build()
    msg = b.to_proto()
    config = GeoJSONVectorSourceConfig.from_proto(msg)
    source = config.create_source(
        crs_transformer=IdentityCRSTransformer(), class_map=class_map)
    trans_geojson = source.get_geojson()

    class_ids = [
        f['properties']['class_id'] for f in trans_geojson['features']
    ]
    self.assertEqual(class_ids, exp_class_ids)
def setUp(self):
    self.crs_transformer = DoubleCRSTransformer()
    self.geojson = {
        'type': 'FeatureCollection',
        'features': [{
            'type': 'Feature',
            'geometry': {
                'type': 'MultiPolygon',
                'coordinates': [[[[0., 0.], [0., 2.], [2., 2.], [2., 0.],
                                  [0., 0.]]]]
            },
            'properties': {
                'class_name': 'car',
                'class_id': 1,
                'score': 0.0
            }
        }, {
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[2., 2.], [2., 4.], [4., 4.], [4., 2.],
                                 [2., 2.]]]
            },
            'properties': {
                'score': 0.0,
                'class_name': 'house',
                'class_id': 2
            }
        }]
    }

    self.class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'house')])

    class MockTaskConfig():
        def __init__(self, class_map):
            self.class_map = class_map

    self.task_config = MockTaskConfig(self.class_map)

    self.box1 = Box.make_square(0, 0, 4)
    self.box2 = Box.make_square(4, 4, 4)
    self.class_id1 = 1
    self.class_id2 = 2
    self.background_class_id = 3

    geoms = []
    for f in self.geojson['features']:
        g = shape(f['geometry'])
        g.class_id = f['properties']['class_id']
        geoms.append(g)
    self.str_tree = STRtree(geoms)

    self.file_name = 'labels.json'
    self.temp_dir = RVConfig.get_tmp_dir()
    self.uri = os.path.join(self.temp_dir.name, self.file_name)
    json_to_file(self.geojson, self.uri)
def create_tf_example(image: np.ndarray,
                      window: Box,
                      labels: np.ndarray,
                      class_map: ClassMap,
                      chip_id: str = ''):
    """Create a TensorFlow Example from an image, its labels, etc.

    Args:
        image: An np.ndarray containing the image data.
        window: A Box object containing the bounding box for this example.
        labels: An np.ndarray containing the label data.
        class_map: A ClassMap object containing mappings between numerical
            and textual labels.
        chip_id: The chip id as a string.

    Returns:
        A DeepLab-compatible TensorFlow Example object containing the given
        data.
    """
    import tensorflow as tf
    from object_detection.utils import dataset_util

    class_keys = set(class_map.get_keys())

    def _clean(n):
        # Map any label not in the class map to 0.
        return (n if n in class_keys else 0x00)

    clean = np.vectorize(_clean, otypes=[np.uint8])

    image_encoded = numpy_to_png(image)
    image_filename = chip_id.encode('utf8')
    image_format = 'png'.encode('utf8')
    image_height, image_width, image_channels = image.shape
    image_segmentation_class_encoded = numpy_to_png(clean(labels))
    image_segmentation_class_format = 'png'.encode('utf8')

    features = tf.train.Features(
        feature={
            'image/encoded':
            dataset_util.bytes_feature(image_encoded),
            'image/filename':
            dataset_util.bytes_feature(image_filename),
            'image/format':
            dataset_util.bytes_feature(image_format),
            'image/height':
            dataset_util.int64_feature(image_height),
            'image/width':
            dataset_util.int64_feature(image_width),
            'image/channels':
            dataset_util.int64_feature(image_channels),
            'image/segmentation/class/encoded':
            dataset_util.bytes_feature(image_segmentation_class_encoded),
            'image/segmentation/class/format':
            dataset_util.bytes_feature(image_segmentation_class_format),
        })

    return tf.train.Example(features=features)
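def _example_create_tf_example():
    """A minimal usage sketch, not part of the original module.

    Shows one way create_tf_example might be called on a dummy chip. Assumes
    np, Box, ClassMap, and ClassItem are importable here as in the surrounding
    code, and that numpy_to_png accepts the 2D uint8 label array; the chip
    size and class name are hypothetical.
    """
    image = np.zeros((10, 10, 3), dtype=np.uint8)
    labels = np.zeros((10, 10), dtype=np.uint8)
    labels[5:, 5:] = 1  # a hypothetical patch of class 1
    class_map = ClassMap([ClassItem(1, 'building')])
    window = Box.make_square(0, 0, 10)
    example = create_tf_example(
        image, window, labels, class_map, chip_id='chip-0')
    return example.SerializeToString()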
def test_enough_target_pixels_true(self):
    data = np.zeros((10, 10, 3), dtype=np.uint8)
    data[4:, 4:, :] = [1, 1, 1]
    raster_source = MockRasterSource([0, 1, 2], 3)
    raster_source.set_raster(data)
    rgb_class_map = ClassMap([ClassItem(id=1, color='#010101')])
    label_source = SemanticSegmentationLabelSource(
        source=raster_source, rgb_class_map=rgb_class_map)
    with label_source.activate():
        extent = Box(0, 0, 10, 10)
        self.assertTrue(label_source.enough_target_pixels(extent, 30, [1]))
def setUp(self):
    self.class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'house')])

    self.npboxes = np.array([
        [0., 0., 2., 2.],
        [2., 2., 4., 4.],
    ])
    self.class_ids = np.array([1, 2])
    self.scores = np.array([0.9, 0.9])
    self.labels = ObjectDetectionLabels(
        self.npboxes, self.class_ids, scores=self.scores)
def with_rgb_class_map(self, rgb_class_map):
    """Set rgb_class_map.

    Args:
        rgb_class_map: (something accepted by ClassMap.construct_from)
            a class map with color values used to map RGB values to
            class ids

    Returns:
        SemanticSegmentationLabelSourceConfigBuilder
    """
    b = deepcopy(self)
    b.config['rgb_class_map'] = ClassMap.construct_from(rgb_class_map)
    return b
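def _example_with_rgb_class_map(label_source_builder):
    """A hedged usage sketch, not from the original source.

    `label_source_builder` is assumed to be a
    SemanticSegmentationLabelSourceConfigBuilder obtained elsewhere; the class
    names and colors are hypothetical. Any form accepted by
    ClassMap.construct_from works here.
    """
    # An explicit ClassMap with colors...
    b = label_source_builder.with_rgb_class_map(
        ClassMap([
            ClassItem(1, 'car', color='red'),
            ClassItem(2, 'house', color='green')
        ]))
    # ...or, equivalently, a dict mapping class names to (id, color) tuples.
    b = label_source_builder.with_rgb_class_map({
        'car': (1, 'red'),
        'house': (2, 'green')
    })
    return b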
def test_evaluator(self):
    class_map = ClassMap([
        ClassItem(id=1, name='one'),
        ClassItem(id=2, name='two'),
    ])
    output_uri = join(self.tmp_dir.name, 'out.json')
    scenes = [self.get_scene(1), self.get_scene(2)]
    evaluator = SemanticSegmentationEvaluator(class_map, output_uri, None)
    evaluator.process(scenes, self.tmp_dir.name)

    eval_json = json.loads(file_to_str(output_uri))
    exp_eval_json = json.loads(
        file_to_str(data_file_path('expected-eval.json')))
    self.assertDictEqual(eval_json, exp_eval_json)
def with_classes(
        self, classes: Union[ClassMap, List[str], List[ClassItemMsg], List[
            ClassItem], Dict[str, int], Dict[str, Tuple[int, str]]]):
    """Set the classes for this task.

    Args:
        classes: Either a ClassMap, a list of class names, a list of
            ClassItem (or ClassItemMsg) objects, a dict which maps class
            names to class ids, or a dict which maps class names to a tuple
            of (class_id, color), where color is a PIL color string.
    """
    b = deepcopy(self)
    b.config['class_map'] = ClassMap.construct_from(classes)
    return b
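def _example_with_classes(task_builder):
    """A hedged sketch, not from the original source.

    Shows the input forms with_classes accepts via ClassMap.construct_from.
    `task_builder` is assumed to be a task config builder exposing
    with_classes; the class names and colors are hypothetical.
    """
    b = task_builder.with_classes(['car', 'building'])        # list of names
    b = task_builder.with_classes({'car': 1, 'building': 2})  # name -> id
    b = task_builder.with_classes({
        'car': (1, 'red'),
        'building': (2, 'blue')
    })  # name -> (id, color)
    b = task_builder.with_classes(
        ClassMap([ClassItem(1, 'car'),
                  ClassItem(2, 'building')]))  # an existing ClassMap
    return b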
def test_get_labels_rgb(self):
    data = np.zeros((10, 10, 3), dtype=np.uint8)
    data[7:, 7:, :] = [1, 1, 1]
    raster_source = MockRasterSource([0, 1, 2], 3)
    raster_source.set_raster(data)
    rgb_class_map = ClassMap([ClassItem(id=1, color='#010101')])
    label_source = SemanticSegmentationLabelSource(
        source=raster_source, rgb_class_map=rgb_class_map)
    with label_source.activate():
        window = Box.make_square(7, 7, 3)
        labels = label_source.get_labels(window=window)
        label_arr = labels.get_label_arr(window)
        expected_label_arr = np.ones((3, 3))
        np.testing.assert_array_equal(label_arr, expected_label_arr)
def setUp(self):
    self.class_map = ClassMap([
        ClassItem(id=1, color='red'),
        ClassItem(id=2, color='green'),
        ClassItem(id=3, color='blue')
    ])
    self.transformer = SegmentationClassTransformer(self.class_map)

    self.rgb_image = np.zeros((1, 3, 3))
    self.rgb_image[0, 0, :] = color_to_triple('red')
    self.rgb_image[0, 1, :] = color_to_triple('green')
    self.rgb_image[0, 2, :] = color_to_triple('blue')

    self.class_image = np.array([[1, 2, 3]])
def test_compute(self):
    class_map = ClassMap(
        [ClassItem(id=1, name='one'), ClassItem(id=2, name='two')])

    # Mismatches: 0 -> 1, 2 -> 1, 1 -> 0
    gt_array = np.ones((4, 4, 1), dtype=np.uint8)
    gt_array[0, 0, 0] = 0
    gt_array[2, 2, 0] = 2
    gt_raster = MockRasterSource([0], 1)
    gt_raster.set_raster(gt_array)
    gt_label_source = SemanticSegmentationLabelSource(source=gt_raster)

    p_array = np.ones((4, 4, 1), dtype=np.uint8)
    p_array[1, 1, 0] = 0
    p_raster = MockRasterSource([0], 1)
    p_raster.set_raster(p_array)
    p_label_source = SemanticSegmentationLabelSource(source=p_raster)

    eval = SemanticSegmentationEvaluation(class_map)
    eval.compute(gt_label_source.get_labels(), p_label_source.get_labels())

    tp1 = 16 - 3  # 4*4 - 3 true positives for class 1
    fp1 = 1  # 1 false positive (2,2) and one don't care at (0,0)
    fn1 = 1  # one false negative (1,1)
    precision1 = float(tp1) / (tp1 + fp1)
    recall1 = float(tp1) / (tp1 + fn1)
    f11 = 2 * float(precision1 * recall1) / (precision1 + recall1)

    tp2 = 0  # 0 true positives for class 2
    fn2 = 1  # one false negative (2,2)
    precision2 = None  # float(tp2) / (tp2 + fp2) where fp2 == 0
    recall2 = float(tp2) / (tp2 + fn2)
    f12 = None

    self.assertAlmostEqual(precision1,
                           eval.class_to_eval_item[1].precision)
    self.assertAlmostEqual(recall1, eval.class_to_eval_item[1].recall)
    self.assertAlmostEqual(f11, eval.class_to_eval_item[1].f1)

    self.assertEqual(precision2, eval.class_to_eval_item[2].precision)
    self.assertAlmostEqual(recall2, eval.class_to_eval_item[2].recall)
    self.assertAlmostEqual(f12, eval.class_to_eval_item[2].f1)

    avg_conf_mat = np.array([[0, 0, 0], [1., 13, 0], [0, 1, 0]])
    avg_recall = (14 / 15) * recall1 + (1 / 15) * recall2
    self.assertTrue(np.array_equal(avg_conf_mat, eval.avg_item.conf_mat))
    self.assertEqual(avg_recall, eval.avg_item.recall)
def setUp(self):
    self.prev_keys = (os.environ.get('AWS_ACCESS_KEY_ID'),
                      os.environ.get('AWS_SECRET_ACCESS_KEY'))
    os.environ['AWS_ACCESS_KEY_ID'] = 'DUMMY'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'DUMMY'
    self.mock_s3 = mock_s3()
    self.mock_s3.start()

    self.file_name = 'labels.json'
    self.temp_dir = RVConfig.get_tmp_dir()
    self.file_path = os.path.join(self.temp_dir.name, self.file_name)

    self.crs_transformer = DoubleCRSTransformer()
    self.geojson = {
        'type': 'FeatureCollection',
        'features': [{
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[0., 0.], [0., 1.], [1., 1.], [1., 0.],
                                 [0., 0.]]]
            },
            'properties': {
                'class_id': 1,
                'score': 0.9
            }
        }, {
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[1., 1.], [1., 2.], [2., 2.], [2., 1.],
                                 [1., 1.]]]
            },
            'properties': {
                'score': 0.9,
                'class_id': 2
            }
        }]
    }

    self.extent = Box.make_square(0, 0, 10)
    self.class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'house')])
    json_to_file(self.geojson, self.file_path)
def setUp(self):
    self.crs_transformer = DoubleCRSTransformer()
    self.geojson = {
        'type': 'FeatureCollection',
        'features': [{
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[0., 0.], [0., 1.], [1., 1.], [1., 0.],
                                 [0., 0.]]]
            },
            'properties': {
                'class_name': 'car',
                'class_id': 1
            }
        }, {
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[1., 1.], [1., 2.], [2., 2.], [2., 1.],
                                 [1., 1.]]]
            },
            'properties': {
                'class_name': 'house',
                'class_id': 2
            }
        }]
    }

    self.class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'house')])

    class MockTaskConfig():
        def __init__(self, class_map):
            self.class_map = class_map

    self.task_config = MockTaskConfig(self.class_map)

    self.temp_dir = RVConfig.get_tmp_dir()
    self.uri = os.path.join(self.temp_dir.name, 'labels.json')
    json_to_file(self.geojson, self.uri)
def test_vector_compute(self):
    class_map = ClassMap([ClassItem(id=1, name='one', color='#000021')])
    gt_uri = data_file_path('3-gt-polygons.geojson')
    pred_uri = data_file_path('3-pred-polygons.geojson')

    eval = SemanticSegmentationEvaluation(class_map)
    eval.compute_vector(gt_uri, pred_uri, 'polygons', 1)

    # NOTE: The two geojson files referenced above contain three unique
    # geometries total: each file contains two geometries, and one geometry
    # is shared between the two.
    tp = 1.0
    fp = 1.0
    fn = 1.0
    precision = float(tp) / (tp + fp)
    recall = float(tp) / (tp + fn)

    self.assertAlmostEqual(precision, eval.class_to_eval_item[1].precision)
    self.assertAlmostEqual(recall, eval.class_to_eval_item[1].recall)
def test_vector_evaluator(self):
    class_map = ClassMap([
        ClassItem(id=1, name='one'),
        ClassItem(id=2, name='two'),
    ])
    output_uri = join(self.tmp_dir.name, 'raster-out.json')
    vector_output_uri = join(self.tmp_dir.name, 'vector-out.json')
    scenes = [self.get_vector_scene(1), self.get_vector_scene(2)]
    evaluator = SemanticSegmentationEvaluator(class_map, output_uri,
                                              vector_output_uri)
    evaluator.process(scenes, self.tmp_dir.name)

    vector_eval_json = json.loads(file_to_str(vector_output_uri))
    exp_vector_eval_json = json.loads(
        file_to_str(data_file_path('expected-vector-eval.json')))

    # NOTE: The precision and recall values in `expected-vector-eval.json`
    # are fractions of the form (n-1)/n for n <= 7, and have been manually
    # verified to be correct.
    self.assertDictEqual(vector_eval_json, exp_vector_eval_json)
def transform_geojson(self,
                      geojson,
                      line_bufs=None,
                      point_bufs=None,
                      crs_transformer=None,
                      to_map_coords=False):
    if crs_transformer is None:
        crs_transformer = IdentityCRSTransformer()
    class_map = ClassMap.construct_from(['building'])
    json_to_file(geojson, self.uri)
    b = GeoJSONVectorSourceConfigBuilder() \
        .with_uri(self.uri) \
        .with_buffers(line_bufs=line_bufs, point_bufs=point_bufs) \
        .build()
    msg = b.to_proto()
    config = GeoJSONVectorSourceConfig.from_proto(msg)
    source = config.create_source(
        crs_transformer=crs_transformer, class_map=class_map)
    return source.get_geojson(to_map_coords=to_map_coords)
def test_compute_ignore_class(self):
    gt_array = np.ones((4, 4, 1), dtype=np.uint8)
    gt_array[0, 0, 0] = 0
    gt_raster = MockRasterSource([0], 1)
    gt_raster.set_raster(gt_array)
    gt_label_source = SemanticSegmentationLabelSource(source=gt_raster)

    pred_array = np.ones((4, 4, 1), dtype=np.uint8)
    pred_raster = MockRasterSource([0], 1)
    pred_raster.set_raster(pred_array)
    pred_label_source = SemanticSegmentationLabelSource(source=pred_raster)

    class_map = ClassMap(
        [ClassItem(id=0, name='ignore'), ClassItem(id=1, name='one')])
    eval = SemanticSegmentationEvaluation(class_map)
    eval.compute(gt_label_source.get_labels(),
                 pred_label_source.get_labels())
    self.assertAlmostEqual(1, len(eval.class_to_eval_item))
    self.assertAlmostEqual(1.0, eval.class_to_eval_item[0].f1)
def setUp(self):
    self.file_name = 'labels.json'
    self.temp_dir = RVConfig.get_tmp_dir()
    self.file_path = os.path.join(self.temp_dir.name, self.file_name)

    self.crs_transformer = DoubleCRSTransformer()
    self.geojson = {
        'type': 'FeatureCollection',
        'features': [{
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[0., 0.], [0., 1.], [1., 1.], [1., 0.],
                                 [0., 0.]]]
            },
            'properties': {
                'class_id': 1,
                'score': 0.9
            }
        }, {
            'type': 'Feature',
            'geometry': {
                'type': 'Polygon',
                'coordinates': [[[1., 1.], [1., 2.], [2., 2.], [2., 1.],
                                 [1., 1.]]]
            },
            'properties': {
                'score': 0.9,
                'class_id': 2
            }
        }]
    }

    self.extent = Box.make_square(0, 0, 10)
    self.class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'house')])
    json_to_file(self.geojson, self.file_path)
def test_compute(self):
    class_map = ClassMap([
        ClassItem(id=1, name='one', color='#010101'),
        ClassItem(id=2, name='two', color='#020202')
    ])

    gt_array = np.ones((4, 4, 3), dtype=np.uint8)
    gt_array[0, 0, :] = 0
    gt_array[2, 2, :] = 2
    gt_raster = MockRasterSource([0, 1, 2], 3)
    gt_raster.set_raster(gt_array)
    gt_label_source = SemanticSegmentationLabelSource(
        source=gt_raster, rgb_class_map=class_map)

    p_array = np.ones((4, 4, 3), dtype=np.uint8)
    p_array[1, 1, :] = 0
    p_raster = MockRasterSource([0, 1, 2], 3)
    p_raster.set_raster(p_array)
    p_label_source = SemanticSegmentationLabelSource(
        source=p_raster, rgb_class_map=class_map)

    eval = SemanticSegmentationEvaluation(class_map)
    eval.compute(gt_label_source.get_labels(), p_label_source.get_labels())

    tp1 = 16 - 3  # 4*4 - 3 true positives for class 1
    fp1 = 1  # 1 false positive (2,2) and one don't care at (0,0)
    fn1 = 1  # one false negative (1,1)
    precision1 = float(tp1) / (tp1 + fp1)
    recall1 = float(tp1) / (tp1 + fn1)

    tp2 = 0  # 0 true positives for class 2
    fn2 = 1  # one false negative (2,2)
    precision2 = None  # float(tp2) / (tp2 + fp2) where fp2 == 0
    recall2 = float(tp2) / (tp2 + fn2)

    self.assertAlmostEqual(precision1,
                           eval.class_to_eval_item[1].precision)
    self.assertAlmostEqual(recall1, eval.class_to_eval_item[1].recall)
    self.assertEqual(precision2, eval.class_to_eval_item[2].precision)
    self.assertAlmostEqual(recall2, eval.class_to_eval_item[2].recall)
def from_proto(self, msg):
    b = SemanticSegmentationLabelSourceConfigBuilder()

    label_source_msg = msg.semantic_segmentation_label_source
    # Fall back to the old field name for backwards compatibility.
    if msg.HasField('semantic_segmentation_raster_source'):
        label_source_msg = msg.semantic_segmentation_raster_source

    raster_source_config = rv.RasterSourceConfig.from_proto(
        label_source_msg.source)
    b = b.with_raster_source(raster_source_config)

    rgb_class_items = label_source_msg.rgb_class_items
    if rgb_class_items:
        b = b.with_rgb_class_map(
            ClassMap.construct_from(list(rgb_class_items)))
    return b
def make_debug_images(record_path: str, output_dir: str, class_map: ClassMap,
                      p: float) -> None:
    """Render a random sample of the TFRecords in a given file as
    human-viewable PNG files.

    Args:
        record_path: Path to the TFRecord file.
        output_dir: Destination directory for the generated PNG files.
        class_map: A ClassMap whose colors are used to render the labels.
        p: The probability of rendering a particular record.

    Returns:
        None
    """
    import tensorflow as tf
    make_dir(output_dir)

    ids = class_map.get_keys()
    color_strs = list(map(lambda c: c.color, class_map.get_items()))
    color_ints = list(map(lambda c: color_to_integer(c), color_strs))
    correspondence = dict(zip(ids, color_ints))

    def _label_fn(v: int) -> int:
        # Map a class id to its packed RGB color, or black if unknown.
        if v in correspondence:
            return correspondence.get(v)
        else:
            return 0

    label_fn = np.vectorize(_label_fn, otypes=[np.uint64])

    def _image_fn(pixel: int) -> int:
        # If the low 24 bits hold a label color, blend the image color
        # (stored in the high bits) with the label color at roughly 50%
        # opacity; otherwise just unpack the original image color.
        if (pixel & 0x00ffffff):
            r = ((pixel >> 41 & 0x7f) + (pixel >> 17 & 0x7f)) << 16
            g = ((pixel >> 33 & 0x7f) + (pixel >> 9 & 0x7f)) << 8
            b = ((pixel >> 25 & 0x7f) + (pixel >> 1 & 0x7f)) << 0
            return r + g + b
        else:
            return pixel >> 24

    image_fn = np.vectorize(_image_fn, otypes=[np.uint64])

    log.info('Generating debug chips')
    tfrecord_iter = tf.python_io.tf_record_iterator(record_path)
    for ind, example in enumerate(tfrecord_iter):
        if np.random.rand() <= p:
            example = tf.train.Example.FromString(example)
            im_unpacked, labels = parse_tf_example(example)

            # Pack the RGB image into the high bits of a uint64 and the
            # label color (from the class map) into the low 24 bits.
            im_r = np.array(im_unpacked[:, :, 0], dtype=np.uint64) * 1 << 40
            im_g = np.array(im_unpacked[:, :, 1], dtype=np.uint64) * 1 << 32
            im_b = np.array(im_unpacked[:, :, 2], dtype=np.uint64) * 1 << 24
            im_packed = im_r + im_g + im_b

            labels_packed = label_fn(np.array(labels))
            im_labels_packed = im_packed + labels_packed
            im_packed = image_fn(im_labels_packed)

            im_unpacked[:, :, 0] = np.bitwise_and(
                im_packed >> 16, 0xff, dtype=np.uint8)
            im_unpacked[:, :, 1] = np.bitwise_and(
                im_packed >> 8, 0xff, dtype=np.uint8)
            im_unpacked[:, :, 2] = np.bitwise_and(
                im_packed >> 0, 0xff, dtype=np.uint8)

            output_path = join(output_dir, '{}.png'.format(ind))
            save_img(im_unpacked, output_path)
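def _example_pixel_blend():
    """A hedged worked example, not from the original source.

    Traces the bit-packing and blending used above for a single pixel with a
    hypothetical image color (200, 100, 50) and label color red (0xff0000).
    """
    packed = (200 << 40) + (100 << 32) + (50 << 24) + 0xff0000
    r = (packed >> 41 & 0x7f) + (packed >> 17 & 0x7f)  # 100 + 127 = 227
    g = (packed >> 33 & 0x7f) + (packed >> 9 & 0x7f)   # 50 + 0 = 50
    b = (packed >> 25 & 0x7f) + (packed >> 1 & 0x7f)   # 25 + 0 = 25
    # The result is roughly a 50/50 blend of the image and label colors.
    return (r, g, b)  # (227, 50, 25)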
def setUp(self):
    self.class_id_to_filter = {1: ['has', 'building']}
    self.class_map = ClassMap.construct_from(['building'])
    self.crs_transformer = IdentityCRSTransformer()
def make_class_map(self):
    class_items = [ClassItem(1, 'grassy'), ClassItem(2, 'urban')]
    return ClassMap(class_items)
def generate_scene(task, tiff_path, labels_path, chip_size,
                   chips_per_dimension):
    """Generate a synthetic object detection or semantic segmentation scene.

    Randomly generates a GeoTIFF with red and green boxes denoting two
    classes and a corresponding label file. This is useful for generating
    synthetic scenes for testing purposes.
    """
    class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'building')])

    # make extent that's divisible by chip_size
    ymax = chip_size * chips_per_dimension
    xmax = chip_size * chips_per_dimension
    extent = Box(0, 0, ymax, xmax)

    # make windows along grid
    windows = extent.get_windows(chip_size, chip_size)

    # for each window, make some random boxes within it and render to image
    nb_channels = 3
    image = np.zeros((ymax, xmax, nb_channels)).astype(np.uint8)
    boxes = []
    class_ids = []
    for window in windows:
        # leave some windows blank
        if random.uniform(0, 1) > 0.3:
            # pick a random class
            class_id = random.randint(1, 2)
            box = window.make_random_square(50).to_int()
            boxes.append(box)
            class_ids.append(class_id)
            image[box.ymin:box.ymax, box.xmin:box.xmax, class_id - 1] = 255

    # save image as geotiff centered in philly
    transform = from_origin(-75.163506, 39.952536, 0.000001, 0.000001)

    print('Generated {} boxes with {} different classes.'.format(
        len(boxes), len(set(class_ids))))

    with rasterio.open(
            tiff_path,
            'w',
            driver='GTiff',
            height=ymax,
            transform=transform,
            crs='EPSG:4326',
            compression=rasterio.enums.Compression.none,
            width=xmax,
            count=nb_channels,
            dtype='uint8') as dst:
        for channel_ind in range(0, nb_channels):
            dst.write(image[:, :, channel_ind], channel_ind + 1)

    if task == 'object_detection':
        # make OD labels from the boxes
        npboxes = Box.to_npboxes(boxes)
        class_ids = np.array(class_ids)
        labels = ObjectDetectionLabels(npboxes, class_ids)

        # save labels to geojson
        with rasterio.open(tiff_path) as image_dataset:
            crs_transformer = RasterioCRSTransformer(image_dataset)
            od_file = ObjectDetectionGeoJSONStore(labels_path,
                                                  crs_transformer, class_map)
            od_file.save(labels)
    elif task == 'semantic_segmentation':
        label_image = np.zeros((ymax, xmax, 1)).astype(np.uint8)
        for box, class_id in zip(boxes, class_ids):
            label_image[box.ymin:box.ymax, box.xmin:box.xmax, 0] = class_id

        # save labels to raster
        with rasterio.open(
                labels_path,
                'w',
                driver='GTiff',
                height=ymax,
                transform=transform,
                crs='EPSG:4326',
                compression=rasterio.enums.Compression.none,
                width=xmax,
                count=1,
                dtype='uint8') as dst:
            dst.write(label_image[:, :, 0], 1)
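# A hedged usage sketch (not from the original source): the output paths and
# sizes below are hypothetical, and this assumes generate_scene can be called
# directly as a plain function (it may instead be wrapped as a CLI command).
# With these arguments it would write a 300x300 synthetic GeoTIFF and a
# matching GeoJSON label file for object detection.
#
# generate_scene(
#     task='object_detection',
#     tiff_path='/tmp/synthetic-scene.tif',
#     labels_path='/tmp/synthetic-labels.json',
#     chip_size=100,
#     chips_per_dimension=3)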
def make_class_map(self):
    class_items = [ClassItem(1, 'car'), ClassItem(2, 'building')]
    return ClassMap(class_items)