def main(unused_argv):
  assert FLAGS.checkpoint_dir, '`checkpoint_dir` is missing.'
  assert FLAGS.eval_dir, '`eval_dir` is missing.'
  tf.gfile.MakeDirs(FLAGS.eval_dir)
  if FLAGS.pipeline_config_path:
    configs = config_util.get_configs_from_pipeline_file(
        FLAGS.pipeline_config_path)
    tf.gfile.Copy(
        FLAGS.pipeline_config_path,
        os.path.join(FLAGS.eval_dir, 'pipeline.config'),
        overwrite=True)
  else:
    configs = config_util.get_configs_from_multiple_files(
        model_config_path=FLAGS.model_config_path,
        eval_config_path=FLAGS.eval_config_path,
        eval_input_config_path=FLAGS.input_config_path)
    for name, config in [('model.config', FLAGS.model_config_path),
                         ('eval.config', FLAGS.eval_config_path),
                         ('input.config', FLAGS.input_config_path)]:
      tf.gfile.Copy(config, os.path.join(FLAGS.eval_dir, name),
                    overwrite=True)

  model_config = configs['model']
  eval_config = configs['eval_config']
  input_config = configs['eval_input_config']
  if FLAGS.eval_training_data:
    input_config = configs['train_input_config']

  model_fn = functools.partial(
      model_builder.build, model_config=model_config, is_training=False)

  def get_next(config):
    return dataset_builder.make_initializable_iterator(
        dataset_builder.build(config)).get_next()

  create_input_dict_fn = functools.partial(get_next, input_config)

  label_map = label_map_util.load_labelmap(input_config.label_map_path)
  max_num_classes = max([item.id for item in label_map.item])
  categories = label_map_util.convert_label_map_to_categories(
      label_map, max_num_classes)

  if FLAGS.run_once:
    eval_config.max_evals = 1

  graph_rewriter_fn = None
  if 'graph_rewriter_config' in configs:
    graph_rewriter_fn = graph_rewriter_builder.build(
        configs['graph_rewriter_config'], is_training=False)

  evaluator.evaluate(
      create_input_dict_fn,
      model_fn,
      eval_config,
      categories,
      FLAGS.checkpoint_dir,
      FLAGS.eval_dir,
      graph_hook_fn=graph_rewriter_fn)
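# Minimal sketch (assumed, not part of the snippet above) of the flag
# definitions and entry point this `main` expects. Only flags actually
# referenced in the function are declared, and the help strings are
# paraphrased.
flags = tf.app.flags
flags.DEFINE_boolean('eval_training_data', False,
                     'If set, evaluate on training data instead of eval data.')
flags.DEFINE_string('checkpoint_dir', '',
                    'Directory holding the checkpoint(s) to evaluate.')
flags.DEFINE_string('eval_dir', '',
                    'Directory to write evaluation summaries into.')
flags.DEFINE_string('pipeline_config_path', '',
                    'Path to a pipeline config; overrides the separate config flags.')
flags.DEFINE_string('model_config_path', '', 'Path to a model config file.')
flags.DEFINE_string('eval_config_path', '', 'Path to an eval config file.')
flags.DEFINE_string('input_config_path', '',
                    'Path to an eval input reader config file.')
flags.DEFINE_boolean('run_once', False,
                     'If set, run a single round of evaluation.')
FLAGS = flags.FLAGS

if __name__ == '__main__':
  tf.app.run()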
def test_load_bad_label_map(self):
  label_map_string = """
    item {
      id:0
      name:'class that should not be indexed at zero'
    }
    item {
      id:2
      name:'cat'
    }
    item {
      id:1
      name:'dog'
    }
  """
  label_map_path = os.path.join(self.get_temp_dir(), 'label_map.pbtxt')
  with tf.gfile.Open(label_map_path, 'wb') as f:
    f.write(label_map_string)
  with self.assertRaises(ValueError):
    label_map_util.load_labelmap(label_map_path)
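# Hedged companion sketch (not in the original file): a label map whose ids
# start at 1 loads without error, since id 0 is reserved for the background
# class. The test name test_load_good_label_map is an assumption.
def test_load_good_label_map(self):
  label_map_string = """
    item {
      id:1
      name:'dog'
    }
    item {
      id:2
      name:'cat'
    }
  """
  label_map_path = os.path.join(self.get_temp_dir(), 'label_map.pbtxt')
  with tf.gfile.Open(label_map_path, 'wb') as f:
    f.write(label_map_string)
  label_map = label_map_util.load_labelmap(label_map_path)
  self.assertEqual(len(label_map.item), 2)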
def __fetch_category_indices():
    dir_path = TFWeaponDetectionAPI.__get_dir_path()
    path_to_labels = os.path.join(dir_path + '/data', 'label_map.pbtxt')
    class_count = 1

    label_map = label_map_util.load_labelmap(path_to_labels)
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=class_count, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)

    # Build a two-way lookup: the same dict resolves id -> name and name -> id.
    category_dict = {}
    for item in category_index.values():
        category_dict[item['id']] = item['name']
        category_dict[item['name']] = item['id']
    return category_index, category_dict
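# Hedged usage sketch: the category_index built above is the structure that the
# Object Detection API's visualization helper consumes. The draw_detections
# function and its arguments (image_np, boxes, classes, scores from some
# detection pass) are illustrative assumptions, not part of the original class.
from object_detection.utils import visualization_utils as vis_util

def draw_detections(image_np, boxes, classes, scores, category_index):
    # Overlays labelled boxes on the image array in place and returns it.
    vis_util.visualize_boxes_and_labels_on_image_array(
        image_np,
        boxes,
        classes.astype('int32'),
        scores,
        category_index,
        use_normalized_coordinates=True,
        line_thickness=4)
    return image_np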
def __init__(self, model_name=PRETRAINED_ssd_mobilenet_v1_coco_2017_11_17):
    self.dir_path = dirname(realpath(__file__))
    self.model_path = self.dir_path + '/object_detection/pretrained/'
    self.model_file = model_name + '.tar.gz'
    self.download_base = 'http://download.tensorflow.org/models/object_detection/'
    self.path_to_frozen_graph = (
        self.model_path + model_name + '/frozen_inference_graph.pb')
    path_to_labels = os.path.join(self.dir_path + '/object_detection/data',
                                  'mscoco_label_map.pbtxt')
    self.class_count = 90

    # Download the pretrained archive only if the frozen graph is not on disk,
    # then load the graph.
    if not path.exists(self.path_to_frozen_graph):
        self.__download()
    self.__load()

    # Map COCO class ids to display names for labelling detections.
    self.label_map = label_map_util.load_labelmap(path_to_labels)
    self.categories = label_map_util.convert_label_map_to_categories(
        self.label_map, max_num_classes=self.class_count, use_display_name=True)
    self.category_index = label_map_util.create_category_index(self.categories)

    self.inPipe = Pipe()
    self.outPipe = Pipe()
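# Hedged usage sketch: constructing the detector wrapper whose __init__ is
# shown above. The class name ObjectDetector is an assumption (only the
# constructor is shown), and PRETRAINED_ssd_mobilenet_v1_coco_2017_11_17 is
# assumed to be a module-level constant naming the default model directory.
# The constructor relies on multiprocessing.Pipe, so inPipe/outPipe are
# (connection, connection) pairs presumably used to exchange frames with a
# separate detection process.
detector = ObjectDetector()               # downloads the model on first run if absent
in_parent, in_child = detector.inPipe     # Pipe() returns a connection pair
out_parent, out_child = detector.outPipe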