def to_proto(self):
    """Serialize this command config, embedding the custom config as JSON.

    The custom configuration dict is JSON-encoded and stored under the
    'json' key of a protobuf Struct in the message's custom_config field.
    """
    custom = struct_pb2.Struct()
    custom['json'] = json.dumps(self.config)
    proto = super().to_proto()
    proto.MergeFrom(CommandConfigMsg(custom_config=custom))
    return proto
def to_proto(self):
    """Return the protobuf message describing this command config."""
    plugins = PluginRegistry.get_instance().to_proto()
    msg = CommandConfigMsg(
        command_type=self.command_type,
        root_uri=self.root_uri,
        split_id=self.split_id,
        plugins=plugins)
    return msg
def to_proto(self):
    """Return the mock's proto when it provides one, else a default message."""
    mocked = self.mock.to_proto()
    if mocked is not None:
        return mocked
    # Fall back to the base message carrying an empty custom config.
    proto = super().to_proto()
    proto.MergeFrom(CommandConfigMsg(custom_config=struct_pb2.Struct()))
    return proto
def test_bundle_od_command(self):
    """End-to-end test of the BUNDLE command for an object-detection setup.

    Builds a task/backend/analyzer/scene, runs the BUNDLE command, then
    unzips the produced predict package and verifies its contents.
    """

    def get_task(tmp_dir):
        # Object-detection task whose predict package lands in tmp_dir.
        predict_package_uri = os.path.join(tmp_dir, 'predict_package.zip')
        t = rv.TaskConfig.builder(rv.OBJECT_DETECTION) \
            .with_predict_package_uri(predict_package_uri) \
            .with_classes(['class1']) \
            .build()
        return t

    def get_backend(task, tmp_dir):
        # TF object-detection backend pointing at a dummy model file so
        # the bundle step has a real file to package.
        model_uri = os.path.join(tmp_dir, 'model')
        template_uri = data_file_path(
            'tf_object_detection/embedded_ssd_mobilenet_v1_coco.config')
        with open(model_uri, 'w') as f:
            f.write('DUMMY')
        b = rv.BackendConfig.builder(rv.TF_OBJECT_DETECTION) \
            .with_task(task) \
            .with_template(template_uri) \
            .with_model_uri(model_uri) \
            .build()
        return b

    with RVConfig.get_tmp_dir() as tmp_dir:
        task = get_task(tmp_dir)
        backend = get_backend(task, tmp_dir)
        analyzer = self.get_analyzer(tmp_dir)
        scene = self.get_scene(tmp_dir)

        # NOTE(review): create_command() is called here with no tmp_dir
        # argument — confirm the no-argument form is intended.
        cmd = rv.CommandConfig.builder(rv.BUNDLE) \
            .with_task(task) \
            .with_root_uri(tmp_dir) \
            .with_backend(backend) \
            .with_analyzers([analyzer]) \
            .with_scene(scene) \
            .build() \
            .create_command()
        cmd.run(tmp_dir)

        # Unzip the predict package and check both the embedded bundle
        # config and the exact set of bundled files.
        package_dir = os.path.join(tmp_dir, 'package')
        make_dir(package_dir)
        with zipfile.ZipFile(task.predict_package_uri, 'r') as package_zip:
            package_zip.extractall(path=package_dir)

        bundle_config_path = os.path.join(package_dir, 'bundle_config.json')
        bundle_config = load_json_config(bundle_config_path,
                                         CommandConfigMsg())

        self.assertEqual(bundle_config.command_type, rv.BUNDLE)

        actual = set(os.listdir(package_dir))
        expected = set(['stats.json', 'model', 'bundle_config.json'])
        self.assertEqual(actual, expected)
def to_proto(self):
    """Return the protobuf message for this ANALYZE command config.

    Extends the base command message with an AnalyzeConfig holding the
    serialized task, scenes, and analyzers.
    """
    msg = super().to_proto()
    task = self.task.to_proto()
    # Comprehensions instead of list(map(lambda ...)) — same result,
    # clearer and idiomatic (ruff C417).
    scenes = [scene.to_proto() for scene in self.scenes]
    analyzers = [analyzer.to_proto() for analyzer in self.analyzers]
    msg.MergeFrom(
        CommandConfigMsg(
            analyze_config=CommandConfigMsg.AnalyzeConfig(
                task=task, scenes=scenes, analyzers=analyzers)))
    return msg
def to_proto(self):
    """Return the protobuf message for this EVAL command config.

    Extends the base command message with an EvalConfig holding the
    serialized task, scenes, and evaluators.
    """
    msg = super().to_proto()
    task = self.task.to_proto()
    # Comprehensions instead of list(map(lambda ...)) — same result,
    # clearer and idiomatic (ruff C417).
    scenes = [scene.to_proto() for scene in self.scenes]
    evaluators = [evaluator.to_proto() for evaluator in self.evaluators]
    msg.MergeFrom(
        CommandConfigMsg(
            eval_config=CommandConfigMsg.EvalConfig(
                task=task, scenes=scenes, evaluators=evaluators)))
    return msg
def to_proto(self):
    """Return the protobuf message for this PREDICT command config.

    Extends the base command message with a PredictConfig holding the
    serialized task, backend, and scenes.
    """
    msg = super().to_proto()
    task = self.task.to_proto()
    backend = self.backend.to_proto()
    # Comprehension instead of list(map(lambda ...)) — same result,
    # clearer and idiomatic (ruff C417).
    scenes = [scene.to_proto() for scene in self.scenes]
    msg.MergeFrom(
        CommandConfigMsg(
            predict_config=CommandConfigMsg.PredictConfig(
                task=task, backend=backend, scenes=scenes)))
    return msg
def to_proto(self):
    """Return the protobuf message for this TRAIN command config."""
    train_config = CommandConfigMsg.TrainConfig(
        task=self.task.to_proto(), backend=self.backend.to_proto())
    proto = super().to_proto()
    proto.MergeFrom(CommandConfigMsg(train_config=train_config))
    return proto
def test_bundle_cc_command(self):
    """End-to-end test of the BUNDLE command for a chip-classification setup.

    Builds a task/backend/analyzer/scene, runs the BUNDLE command, then
    unzips the produced predict package and verifies its contents.
    """

    def get_task(tmp_dir):
        # Chip-classification task whose predict package lands in tmp_dir.
        predict_package_uri = os.path.join(tmp_dir, 'predict_package.zip')
        t = rv.TaskConfig.builder(rv.CHIP_CLASSIFICATION) \
            .with_predict_package_uri(predict_package_uri) \
            .with_classes(['class1']) \
            .build()
        return t

    def get_backend(task, tmp_dir):
        # Keras classification backend pointing at a dummy model file so
        # the bundle step has a real file to package.
        model_uri = os.path.join(tmp_dir, 'model')
        with open(model_uri, 'w') as f:
            f.write('DUMMY')
        b = rv.BackendConfig.builder(rv.KERAS_CLASSIFICATION) \
            .with_task(task) \
            .with_model_defaults(rv.RESNET50_IMAGENET) \
            .with_model_uri(model_uri) \
            .build()
        return b

    with RVConfig.get_tmp_dir() as tmp_dir:
        task = get_task(tmp_dir)
        backend = get_backend(task, tmp_dir)
        analyzer = self.get_analyzer(tmp_dir)
        scene = self.get_scene(tmp_dir)

        cmd = rv.CommandConfig.builder(rv.BUNDLE) \
            .with_task(task) \
            .with_root_uri(tmp_dir) \
            .with_backend(backend) \
            .with_analyzers([analyzer]) \
            .with_scene(scene) \
            .build() \
            .create_command(tmp_dir)
        cmd.run(tmp_dir)

        # Unzip the predict package and check both the embedded bundle
        # config and the exact set of bundled files.
        package_dir = os.path.join(tmp_dir, 'package')
        make_dir(package_dir)
        with zipfile.ZipFile(task.predict_package_uri, 'r') as package_zip:
            package_zip.extractall(path=package_dir)

        bundle_config_path = os.path.join(package_dir, 'bundle_config.json')
        bundle_config = load_json_config(bundle_config_path,
                                         CommandConfigMsg())

        self.assertEqual(bundle_config.command_type, rv.BUNDLE)

        actual = set(os.listdir(package_dir))
        expected = set(['stats.json', 'model', 'bundle_config.json'])
        self.assertEqual(actual, expected)
def to_proto(self):
    """Return the protobuf message for this CHIP command config.

    Extends the base command message with a ChipConfig holding the
    serialized task, backend, and training/validation scenes.
    """
    msg = super().to_proto()
    task = self.task.to_proto()
    backend = self.backend.to_proto()
    # Comprehensions instead of list(map(lambda ...)) — same result,
    # clearer and idiomatic (ruff C417).
    train_scenes = [scene.to_proto() for scene in self.train_scenes]
    val_scenes = [scene.to_proto() for scene in self.val_scenes]
    msg.MergeFrom(
        CommandConfigMsg(
            chip_config=CommandConfigMsg.ChipConfig(
                task=task,
                backend=backend,
                train_scenes=train_scenes,
                val_scenes=val_scenes)))
    return msg
def to_proto(self):
    """Return the protobuf message for this BUNDLE command config.

    Extends the base command message with a BundleConfig holding the
    serialized task, backend, scene, and analyzers.
    """
    msg = super().to_proto()
    # Comprehension instead of list(map(lambda ...)) — same result,
    # clearer and idiomatic (ruff C417).
    bundle = CommandConfigMsg.BundleConfig(
        task=self.task.to_proto(),
        backend=self.backend.to_proto(),
        scene=self.scene.to_proto(),
        analyzers=[analyzer.to_proto() for analyzer in self.analyzers])
    msg.MergeFrom(CommandConfigMsg(bundle_config=bundle))
    return msg
def to_proto(self):
    """Return the base command message extended with an empty custom config."""
    empty_config = struct_pb2.Struct()
    proto = super().to_proto()
    proto.MergeFrom(CommandConfigMsg(custom_config=empty_config))
    return proto
def run(command_config_uri):
    """Load a serialized command config from a URI and execute it."""
    config_msg = load_json_config(command_config_uri, CommandConfigMsg())
    CommandRunner.run_from_proto(config_msg)
def __init__(self,
             prediction_package_uri,
             tmp_dir,
             update_stats=False,
             channel_order=None):
    """Creates a new Predictor.

    Args:
        prediction_package_uri: The URI of the prediction package to use.
            Can be any type of URI that Raster Vision can read.
        tmp_dir: Temporary directory in which to store files that are
            used by the Predictor. This directory is not cleaned up by
            this class.
        update_stats: Option indicating if any Analyzers should be run on
            the image to be predicted on. Otherwise, the Predictor will
            use the output of Analyzers that are bundled with the predict
            package. This is useful, for instance, if you are predicting
            against imagery that needs to be normalized with a
            StatsAnalyzer, and the color profile of the new imagery is
            significantly different than the imagery the model was
            trained on.
        channel_order: Option for a new channel order to use for the
            imagery being predicted against. If not present, the
            channel_order from the original configuration in the predict
            package will be used.
    """
    self.tmp_dir = tmp_dir
    self.update_stats = update_stats
    # Model loading is deferred; this flag is flipped elsewhere.
    self.model_loaded = False

    # Download (if remote) and extract the predict package into tmp_dir.
    package_zip_path = download_if_needed(prediction_package_uri, tmp_dir)
    package_dir = os.path.join(tmp_dir, 'package')
    make_dir(package_dir)
    with zipfile.ZipFile(package_zip_path, 'r') as package_zip:
        package_zip.extractall(path=package_dir)

    # Read bundle command config
    bundle_config_path = os.path.join(package_dir, 'bundle_config.json')
    msg = load_json_config(bundle_config_path, CommandConfigMsg())
    bundle_config = msg.bundle_config

    # Rehydrate the task/backend configs; load_bundle_files points any
    # bundled file URIs at the extracted package directory.
    self.task_config = rv.TaskConfig.from_proto(bundle_config.task) \
                         .load_bundle_files(package_dir)

    self.backend_config = rv.BackendConfig.from_proto(bundle_config.backend) \
                            .load_bundle_files(package_dir)

    # Build a reusable scene template from the bundled scene, stripped of
    # its label source and AOIs, under a fixed 'PREDICTOR' id.
    scene_config = rv.SceneConfig.from_proto(bundle_config.scene)
    scene_builder = scene_config.load_bundle_files(package_dir) \
                                .to_builder() \
                                .clear_label_source() \
                                .clear_aois() \
                                .with_id('PREDICTOR')

    # If the scene does not have a label store, generate a default one.
    if not scene_config.label_store:
        scene_builder = scene_builder.with_task(self.task_config) \
                                     .with_label_store()

    # Optionally override the channel order of the bundled raster source.
    if channel_order:
        raster_source = scene_builder.config['raster_source'] \
                                     .to_builder() \
                                     .with_channel_order(channel_order) \
                                     .build()
        scene_builder = scene_builder.with_raster_source(raster_source)

    self.scene_config = scene_builder.build()

    # Analyzers are only rerun on new imagery when update_stats is set;
    # otherwise the bundled analyzer output is reused as-is.
    self.analyzer_configs = []
    if update_stats:
        for analyzer in bundle_config.analyzers:
            a = rv.AnalyzerConfig.from_proto(analyzer) \
                  .load_bundle_files(package_dir)
            self.analyzer_configs.append(a)

    # Rebuild the bundle command config with the rehydrated components.
    self.bundle_config = rv.command.CommandConfig \
                           .from_proto(msg) \
                           .to_builder() \
                           .with_task(self.task_config) \
                           .with_backend(self.backend_config) \
                           .with_scene(self.scene_config) \
                           .with_analyzers(self.analyzer_configs) \
                           .build()
def run(self):
    """Show the predict dialog and, if accepted, run prediction and load it.

    Populates the dialog from saved Settings, lists available raster
    layers, then on acceptance: persists the chosen settings, exports the
    selected layer to a GeoTIFF, runs prediction either inside a Docker
    container or in-process via Predictor, and finally loads the
    resulting prediction layer into QGIS.
    """
    self.dlg.show()

    # Pre-fill the dialog widgets from the persisted plugin settings.
    settings = Settings()
    self.dlg.predict_package_edit.setText(settings.get_predict_package())

    # Style profiles, with an EMPTY placeholder at index 0 ("no profile").
    self.dlg.style_profile_combobox.clear()
    profiles = settings.get_style_profiles()
    profiles.insert(0, StyleProfile.EMPTY())
    profile_names = list(map(lambda p: p.name, profiles))
    self.dlg.style_profile_combobox.addItems(profile_names)
    settings_profile = settings.get_predict_profile()
    if settings_profile in profile_names:
        self.dlg.style_profile_combobox.setCurrentIndex(
            profile_names.index(settings_profile))
    else:
        self.dlg.style_profile_combobox.setCurrentIndex(0)

    self.dlg.label_uri_edit.setText(settings.get_label_store_uri())
    self.dlg.update_stats_checkbox.setChecked(settings.get_update_stats())
    self.dlg.use_docker_checkbox.setChecked(settings.get_use_docker())
    self.dlg.docker_image_edit.setText(settings.get_docker_image())

    # Load all raster layers
    self.dlg.input_layer_combobox.clear()
    raster_layers = get_raster_layers()
    layer_names = []
    for name in raster_layers:
        layer_names.append(name)
        self.dlg.input_layer_combobox.addItem(name)

    result = self.dlg.exec_()
    if result:
        # Dialog accepted: read back every widget and persist the values.
        predict_package = self.dlg.predict_package_edit.text()
        settings.set_predict_package(predict_package)
        layer_name_idx = self.dlg.input_layer_combobox.currentIndex()
        layer_name = layer_names[layer_name_idx]
        layer = raster_layers[layer_name]
        style_profile_index = self.dlg.style_profile_combobox.currentIndex()
        settings.set_predict_profile(profile_names[style_profile_index])
        # Index 0 is the EMPTY placeholder, i.e. no style profile.
        style_profile = None
        if not style_profile_index == 0:
            style_profile = profiles[style_profile_index]
        label_store_uri = self.dlg.label_uri_edit.text()
        settings.set_label_store_uri(label_store_uri)
        # NOTE(review): checkState() returns a Qt tri-state value (0/1/2),
        # not a bool — confirm downstream consumers treat it as truthy.
        update_stats = self.dlg.update_stats_checkbox.checkState()
        settings.set_update_stats(update_stats)
        use_docker = self.dlg.use_docker_checkbox.checkState()
        settings.set_use_docker(use_docker)
        docker_image = self.dlg.docker_image_edit.text()
        settings.set_docker_image(docker_image)
        prediction_layer_name = '{}-predictions'.format(layer_name)

        with TemporaryDirectory(dir=settings.get_working_dir()) as tmp_dir:
            # Export the chosen layer to a GeoTIFF for prediction input.
            path = os.path.join(tmp_dir, "{}.tif".format(layer_name))
            export_raster_layer(layer, path)
            if not os.path.exists(path):
                raise Exception("Writing raster to {} failed".format(path))

            bundle_config_base = 'bundle_config.json'
            bundle_config_path = os.path.join(tmp_dir, bundle_config_base)

            # Grab the predict package locally
            pp_local = download_if_needed(predict_package, tmp_dir)

            if use_docker:
                # Run `rastervision predict` inside the container, mounting
                # the tmp dir, label output dir, and predict package dir.
                pp_dir = os.path.dirname(pp_local)
                pp_base = os.path.basename(pp_local)
                gt_base = os.path.basename(path)
                lb_dir = os.path.dirname(label_store_uri)
                lb_base = os.path.basename(label_store_uri)
                cmd = [
                    'docker', 'run', '--rm', '-v',
                    '{}/.rastervision:/root/.rastervision'.format(
                        os.environ['HOME']), '-v',
                    '{}:{}'.format(tmp_dir, '/opt/source'), '-v',
                    '{}:{}'.format(lb_dir, '/opt/output'), '-v',
                    '{}:{}'.format(pp_dir, '/opt/predict_package'),
                    docker_image, 'rastervision', 'predict',
                    '/opt/predict_package/{}'.format(pp_base),
                    '/opt/source/{}'.format(gt_base),
                    '/opt/output/{}'.format(lb_base), '--export-config',
                    '/opt/source/{}'.format(bundle_config_base)
                ]
                if update_stats:
                    cmd.append('--update-stats')
                Log.log_info('Running command: {}'.format(' '.join(cmd)))
                try:
                    output = check_output(cmd)
                    Log.log_info("[PREDICT OUTPUT]: {}".format(output))
                except CalledProcessError as e:
                    Log.log_error("Error running {}: {}".format(
                        ' '.join(cmd), e.output))
                    raise e
            else:
                # In-process prediction; also exports the bundle config
                # used below to build the label store.
                predictor = Predictor(
                    pp_local, tmp_dir, update_stats=update_stats)
                predictor.predict(path, label_store_uri, bundle_config_path)

            # Rebuild task/scene configs from the exported bundle config
            # and point the label store at the prediction output.
            msg = load_json_config(bundle_config_path, CommandConfigMsg())
            bundle_config = msg.bundle_config
            task_config = rv.TaskConfig.from_proto(bundle_config.task)
            scene_config = rv.SceneConfig.from_proto(bundle_config.scene)
            label_store_config = scene_config.label_store.for_prediction(
                label_store_uri)

            # Load prediction
            config = label_store_config
            loader = RegistryInstance.get().get_label_store_loader(
                config.store_type)
            ctx = LoadContext(task_config, self.iface, style_profile, None)
            style_file = None
            if ctx.style_profile:
                style_file = ctx.style_profile.prediction_style_file
            loader.load(config, prediction_layer_name, ctx, style_file)