示例#1
0
    def test_file_to_str_local(self):
        # Round-trip a string through a local file.
        str_to_file(self.content_str, self.local_path)
        self.assertEqual(self.content_str, file_to_str(self.local_path))

        # Reading a nonexistent path must raise NotReadableError.
        with self.assertRaises(NotReadableError):
            file_to_str('/wrongpath/x.txt')
示例#2
0
    def test_file_to_str_s3(self):
        bad_uri = 's3://wrongpath/x.txt'

        # Writing to an unwritable bucket must raise.
        with self.assertRaises(NotWritableError):
            str_to_file(self.content_str, bad_uri)

        # Round-trip a string through S3.
        str_to_file(self.content_str, self.s3_path)
        self.assertEqual(self.content_str, file_to_str(self.s3_path))

        # Reading from the bad bucket must raise.
        with self.assertRaises(NotReadableError):
            file_to_str(bad_uri)
示例#3
0
 def test_evaluator(self):
     """Compare raster eval output against the checked-in expectation."""
     class_map = ClassMap(
         [ClassItem(id=1, name='one'),
          ClassItem(id=2, name='two')])
     output_uri = join(self.tmp_dir.name, 'out.json')
     evaluator = SemanticSegmentationEvaluator(class_map, output_uri, None)
     evaluator.process([self.get_scene(1), self.get_scene(2)],
                       self.tmp_dir.name)
     actual = json.loads(file_to_str(output_uri))
     expected = json.loads(
         file_to_str(data_file_path('expected-eval.json')))
     self.assertDictEqual(actual, expected)
示例#4
0
    def get_vector_scene(self, class_id, use_aoi=False):
        """Build a Scene with rasterized GT and prediction polygon sources.

        Args:
            class_id: class id used to locate the fixture geojson files.
            use_aoi: if True, also attach the AOI polygon from the matching
                fixture file to the returned Scene.
        """
        gt_uri = data_file_path('{}-gt-polygons.geojson'.format(class_id))
        pred_uri = data_file_path('{}-pred-polygons.geojson'.format(class_id))

        raster_source = MockRasterSource(
            channel_order=[0, 1, 3], num_channels=3)
        raster_source.set_raster(np.zeros((10, 10, 3)))

        crs_transformer = IdentityCRSTransformer()
        extent = Box.make_square(0, 0, 360)

        def rasterized_label_source(uri):
            # Rasterize the polygon geojson at `uri` over `extent`.
            rasterized = RasterizedSource(
                GeoJSONVectorSource(uri, crs_transformer),
                RasterizedSourceConfig.RasterizerOptions(2), extent,
                crs_transformer)
            return SemanticSegmentationLabelSource(source=rasterized)

        gt_label_source = rasterized_label_source(gt_uri)
        pred_label_source = rasterized_label_source(pred_uri)
        pred_label_source.vector_output = [{
            'uri': pred_uri,
            'denoise': 0,
            'mode': 'polygons',
            'class_id': class_id
        }]

        scene_id = str(class_id)
        if not use_aoi:
            return Scene(scene_id, raster_source, gt_label_source,
                         pred_label_source)

        aoi_uri = data_file_path('{}-aoi.geojson'.format(class_id))
        aoi_geojson = json.loads(file_to_str(aoi_uri))
        aoi_polygons = [shape(aoi_geojson['features'][0]['geometry'])]
        return Scene(scene_id, raster_source, gt_label_source,
                     pred_label_source, aoi_polygons)
示例#5
0
    def with_template(self, template):
        """Use a template for TF Object Detection pipeline config.

        Args:
            template: A dict, string or uri as the base for the TF
                Object Detection API model training pipeline, for example those
                found here:
                https://github.com/tensorflow/models/tree/eef6bb5bd3b3cd5fcf54306bf29750b7f9f9a5ea/research/object_detection/samples/configs # noqa

        Returns:
            A copy of this builder with 'tfod_config' set to the parsed
            template.
        """
        # isinstance (rather than `type(...) is dict`) also accepts dict
        # subclasses such as OrderedDict.
        if isinstance(template, dict):
            template_json = template
        else:
            # Try parsing the string as a message; on failure assume it is a
            # URI and parse the file contents instead.
            try:
                msg = text_format.Parse(template, TrainEvalPipelineConfig())
            except text_format.ParseError:
                msg = text_format.Parse(
                    file_to_str(template), TrainEvalPipelineConfig())
            template_json = json_format.MessageToDict(msg)

        b = deepcopy(self)
        b.config['tfod_config'] = template_json
        return b
示例#6
0
 def test_vector_evaluator(self):
     """Compare vector eval output against the checked-in expectation.

     NOTE: The precision and recall values found in the file
     `expected-vector-eval.json` are equal to fractions of the form
     (n-1)/n for n <= 7 which can be seen to be (and have been manually
     verified to be) correct.
     """
     class_map = ClassMap(
         [ClassItem(id=1, name='one'),
          ClassItem(id=2, name='two')])
     output_uri = join(self.tmp_dir.name, 'raster-out.json')
     vector_output_uri = join(self.tmp_dir.name, 'vector-out.json')
     evaluator = SemanticSegmentationEvaluator(class_map, output_uri,
                                               vector_output_uri)
     evaluator.process(
         [self.get_vector_scene(1), self.get_vector_scene(2)],
         self.tmp_dir.name)
     actual = json.loads(file_to_str(vector_output_uri))
     expected = json.loads(
         file_to_str(data_file_path('expected-vector-eval.json')))
     self.assertDictEqual(actual, expected)
示例#7
0
    def test_download_if_needed_s3(self):
        # Downloading before anything was uploaded must fail.
        with self.assertRaises(NotReadableError):
            download_if_needed(self.s3_path, self.temp_dir.name)

        # Upload, download, and verify the round trip.
        str_to_file(self.content_str, self.local_path)
        upload_or_copy(self.local_path, self.s3_path)
        downloaded = download_if_needed(self.s3_path, self.temp_dir.name)
        self.assertEqual(self.content_str, file_to_str(downloaded))

        # Uploading to an unwritable bucket must fail.
        with self.assertRaises(NotWritableError):
            upload_or_copy(downloaded, 's3://wrongpath/x.txt')
    def _test_get_geojson(self, vector_tile_uri, json_uri):
        """Check the source's geojson against the expected fixture file."""
        actual = self._get_source(vector_tile_uri).get_geojson()
        expected = json.loads(file_to_str(data_file_path(json_uri)))

        # Need to convert to JSON and back again because geojson object has
        # tuples instead of lists because of a quirk of
        # shapely.geometry.mapping.
        # See https://github.com/Toblerity/Shapely/issues/245
        actual = json.loads(json.dumps(actual))

        def by_id(feature):
            return feature['properties']['__id']

        actual['features'].sort(key=by_id)
        expected['features'].sort(key=by_id)

        self.assertDictEqual(actual, expected)
 def _get_source(self, uri):
     """Create a vector-tile source for `uri` clipped to the LV AOI extent."""
     proto = VectorTileVectorSourceConfigBuilder() \
         .with_class_inference(class_id_to_filter=self.class_id_to_filter,
                               default_class_id=None) \
         .with_uri(uri) \
         .with_zoom(14) \
         .with_id_field('__id') \
         .with_buffers(line_bufs={1: 0.0001}, point_bufs={1: 0.0001}) \
         .build() \
         .to_proto()
     config = VectorTileVectorSourceConfig.from_proto(proto)
     aoi_geojson = json.loads(
         file_to_str(data_file_path('vector_tiles/lv-aoi.json')))
     extent = Box.from_shapely(
         shape(aoi_geojson['features'][0]['geometry']))
     return config.create_source(self.crs_transformer, extent,
                                 self.class_map)
示例#10
0
def filter_geojson(labels_uri, output_uri, class_names):
    """Remove features that aren't in class_names and remove class_ids.

    Args:
        labels_uri: URI of the geojson to read.
        output_uri: URI to write the filtered geojson to.
        class_names: collection of class names to keep.
    """
    labels = json.loads(file_to_str(labels_uri))
    filtered_features = []

    for feature in labels['features']:
        feature = copy.deepcopy(feature)
        properties = feature.get('properties')
        if properties:
            # Bug fix: the fallback used to *call* the properties dict
            # (`properties('label')`), which raised TypeError; use .get().
            class_name = (properties.get('class_name')
                          or properties.get('label'))
            if class_name in class_names:
                # pop() instead of del: don't crash when a feature has no
                # class_id to begin with.
                properties.pop('class_id', None)
                filtered_features.append(feature)

    new_labels = {'features': filtered_features}
    str_to_file(json.dumps(new_labels), output_uri)
示例#11
0
def build_scenes(remote, test, task, channel_order):
    """Build (train_scenes, val_scenes) from the scenes config file.

    Each city contributes a scene built from its 'train' split indices and a
    scene built from its 'test' split indices, when those splits are present.
    """
    scenes_config = json.loads(file_to_str(scenes_config_path))

    if test:
        splits = {'paramaribo_test': {'train': [0], 'test': [1]}}
    else:
        splits = {
            'belice': {
                'train': [0, 1],
                'test': [2]
            },
            'georgetown': {
                'train': [0, 1],
                'test': [4]
            },
            'paramaribo': {
                'train': [0, 1],
                'test': [2]
            }
        }

    train_scenes = []
    val_scenes = []
    for city, split in splits.items():
        # Build the train and test scenes for this city with one code path.
        for split_name, scene_list in (('train', train_scenes),
                                       ('test', val_scenes)):
            indices = split.get(split_name)
            if indices:
                scene_list.append(
                    build_scene(remote,
                                test,
                                task,
                                scenes_config[city],
                                indices,
                                channel_order=channel_order))

    return train_scenes, val_scenes
    def with_template(self, template):
        """Use a TFDL config template from dict, string or uri.

        Returns:
            A copy of this builder with 'tfdl_config' set to the parsed
            template.
        """
        from rastervision.protos.deeplab.train_pb2 import (
            TrainingParameters as TrainingParametersMsg)

        # isinstance (rather than `type(...) is dict`) also accepts dict
        # subclasses such as OrderedDict.
        if isinstance(template, dict):
            template_json = template
        else:
            # Try parsing the string as a message; on failure assume it is a
            # URI and parse the file contents instead.
            try:
                msg = json_format.Parse(template, TrainingParametersMsg())
            except json_format.ParseError:
                msg = json_format.Parse(file_to_str(template),
                                        TrainingParametersMsg())
            template_json = json_format.MessageToDict(msg)

        b = deepcopy(self)
        b.config['tfdl_config'] = template_json
        return b
示例#13
0
    def get_model_defaults(self):
        """Return the "model defaults".

        The model defaults is a json file that lists a set of default
        configurations for models, per backend and model key.
        There are defaults that are installed with Raster Vision, but
        users can override these defaults with their own by setting
        the "model_defaults_uri" in the [RV] section of their
        configuration file, or by setting the RV_MODEL_DEFAULT_URI
        environment variable.
        """
        default_path = os.path.join(
            os.path.dirname(rv.backend.__file__), 'model_defaults.json')
        # The 'RV' subconfig is callable: (key, default=...) -> value.
        uri = self.get_subconfig('RV')('model_defaults_uri',
                                       default=default_path)
        return json.loads(file_to_str(uri))
    def with_template(self, template):
        """Use a template from the dict, string or uri as the base for the
        Keras Classification API.

        Returns:
            A copy of this builder with 'kc_config' set to the parsed
            template.
        """
        # isinstance (rather than `type(...) is dict`) also accepts dict
        # subclasses such as OrderedDict.
        if isinstance(template, dict):
            # Round-trip the dict through the message so the stored config is
            # validated against the PipelineConfig schema.
            msg = json_format.ParseDict(template, PipelineConfig())
            template_json = json_format.MessageToDict(msg)
        else:
            # Try parsing the string as a message; on failure assume it is a
            # URI and parse the file contents instead.
            try:
                msg = json_format.Parse(template, PipelineConfig())
            except json_format.ParseError:
                msg = json_format.Parse(file_to_str(template),
                                        PipelineConfig())
            template_json = json_format.MessageToDict(msg)

        b = deepcopy(self)
        b.config['kc_config'] = template_json
        return b
示例#15
0
def get_scene_info(csv_uri):
    """Read the CSV at `csv_uri` and return its rows as a list of lists."""
    buf = StringIO(file_to_str(csv_uri))
    return list(csv.reader(buf, delimiter=','))
示例#16
0
 def load(stats_uri):
     """Load a RasterStats (means and stds) from the JSON at stats_uri."""
     parsed = json.loads(file_to_str(stats_uri))
     stats = RasterStats()
     stats.means, stats.stds = parsed['means'], parsed['stds']
     return stats
 def _get_geojson(self):
     """Read the geojson at self.uri and run class inference over it."""
     raw_geojson = json.loads(file_to_str(self.uri))
     return self.class_inference.transform_geojson(raw_geojson)
 def load(config, iface):
     """Log the evaluation JSON found at config.output_uri, if any."""
     if config.output_uri:
         Log.log_info("Evaluation: {}".format(config.evaluator_type))
         eval_data = json.loads(file_to_str(config.output_uri))
         Log.log_info(json.dumps(eval_data, indent=2))