Example #1
    def __init__(self):
        #TODO load classifier

        # Resolve paths relative to this file; otherwise the working directory depends on how the node is launched
        base_path = os.path.dirname(os.path.abspath(__file__))
        graph_pth = os.path.join(base_path, 'fine_tuned_model_real_mobilenet', 'frozen_inference_graph.pb')

        label_pth = os.path.join(base_path, 'fine_tuned_model_real_mobilenet', 'labels_map.pbtxt')

        self.graph_pth = graph_pth
        self.detection_graph = self.load_tf_graph(self.graph_pth)

        self.label_pth = label_pth
        self.label_map = label_map_util.load_labelmap(label_pth)
        self.categories = label_map_util.convert_label_map_to_categories(self.label_map, max_num_classes=3, use_display_name=True)

        self.category_index = label_map_util.create_category_index(self.categories)

        self.class_map = {
            1: TrafficLight.GREEN,
            2: TrafficLight.YELLOW,
            3: TrafficLight.RED
        }
Example #2
def class_text_to_int(row_label):
    label = lmu.load_labelmap('/images/labelmap.pbtxt')
    for lb in label.item:
        if row_label == lb.name:
            return lb.id
    return None
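For context, the labelmap.pbtxt files loaded in these examples are plain-text protobuf label maps: a list of item blocks, each carrying an id and a name, which is exactly what class_text_to_int iterates over. A minimal illustrative file (contents assumed, not taken from either project) looks like this:

item {
  id: 1
  name: 'green'
}
item {
  id: 2
  name: 'yellow'
}
item {
  id: 3
  name: 'red'
}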
Example #3
    def Open(self):
        #Loading label map
        self.label_map = label_map_util.load_labelmap(self.PATH_TO_LABELS)
        self.categories = label_map_util.convert_label_map_to_categories(self.label_map, max_num_classes=self.NUM_CLASSES, use_display_name=True)
        self.category_index = label_map_util.create_category_index(self.categories)
 
        #Load a (frozen) Tensorflow model into memory.
        detection_graph = tf.Graph()
        with detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(self.PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        config = tf.ConfigProto(
            device_count = {'GPU': 0}
        )

        self.detector = tf.Session(graph=detection_graph, config=config)
        # Define input and output tensors for detection_graph
        self.image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object was detected.
        self.detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score represents the level of confidence for each of the objects.
        # The score is shown on the result image, together with the class label.
        self.detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
        self.detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
        self.num_detections = detection_graph.get_tensor_by_name('num_detections:0')
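A usage note, not part of the original snippet: once Open() has run, detection is a single session.run over the tensors stored on the instance. The method below is a hypothetical sketch; it assumes numpy is imported as np and that image_np is an RGB image array.

    def Detect(self, image_np):
        # Hypothetical companion method (assumed, not in the original class).
        # The model expects a batch dimension, so expand dims before feeding.
        image_expanded = np.expand_dims(image_np, axis=0)
        (boxes, scores, classes, num) = self.detector.run(
            [self.detection_boxes, self.detection_scores,
             self.detection_classes, self.num_detections],
            feed_dict={self.image_tensor: image_expanded})
        return boxes, scores, classes, num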
Example #4
def set_model(model_name):
    # Look for the model directory in the current working directory
    # (the loop body is a no-op placeholder).
    for file in glob.glob("*"):
        if file == model_name:
            pass

    # Path to frozen detection graph. This is the actual model that is used for the object detection.
    path_to_ckpt = model_name + '/frozen_inference_graph.pb'

    # List of the strings that is used to add correct label for each box.
    path_to_labels = os.path.join('data', 'mscoco_label_map.pbtxt')

    num_classes = 90

    # Load a (frozen) Tensorflow model into memory.
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(path_to_ckpt, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')

    # Loading label map
    # Label maps map indices to category names, so that when our convolutional
    # network predicts 5, we know that this corresponds to airplane. Here I use
    # internal utility functions, but anything that returns a dictionary mapping
    # integers to appropriate string labels would be fine.
    label_map = label_map_util.load_labelmap(path_to_labels)
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=num_classes, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)

    return detection_graph, category_index
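As the comment above notes, nothing in the visualization step requires these utilities: the category index is just a dictionary from integer class ids to label entries. A hand-built stand-in would look like the sketch below; the three entries shown are illustrative COCO classes, not the full 90-class map.

    # Illustrative stand-in for label_map_util.create_category_index() (assumed values):
    category_index = {
        1: {'id': 1, 'name': 'person'},
        2: {'id': 2, 'name': 'bicycle'},
        3: {'id': 3, 'name': 'car'},
    }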
Example #5
File: inference.py  Project: rkeen9/polaris
    def __init__(self):
        # Get paths
        TFWD_PATH = os.path.join(os.path.dirname(__file__), 'tf')
        PATH_TO_CKPT = os.path.join(TFWD_PATH,'frozen_inference_graph.pb')
        PATH_TO_LABELS = os.path.join(TFWD_PATH,'annotation.pbtxt')
        
        # get our label map
        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=3, use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)

        # Load the Tensorflow model into memory.
        detection_graph = tf.Graph()
        with detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
            # Create a session on this graph and keep it on the instance
            self.sess = tf.Session(graph=detection_graph)

        # Define input and output tensors for the object detection classifier
        self.it = detection_graph.get_tensor_by_name('image_tensor:0')
        self.db = detection_graph.get_tensor_by_name('detection_boxes:0')
        self.ds = detection_graph.get_tensor_by_name('detection_scores:0')
        self.dc = detection_graph.get_tensor_by_name('detection_classes:0')
        self.nd = detection_graph.get_tensor_by_name('num_detections:0')
Example #6
    def _load_mapphx(self):
        label_map = label_map_util.load_labelmap(self.PATH_TO_LABELS)

        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=self.NUM_CLASSES, use_display_name=True)
        category_index = label_map_util.create_category_index(categories)
        return category_index
Example #7
def load_model():
    # Build the model path:
    CWD_PATH = os.getcwd()  # os.getcwd() returns the current working directory.
    PATH_TO_CKPT = os.path.join(CWD_PATH,
                                '../ssd_mobilenet_v1_coco_2018_01_28',
                                'frozen_inference_graph.pb')

    # List of the strings that is used to add correct label for each box.
    PATH_TO_LABELS = os.path.join(CWD_PATH, 'data', 'mscoco_label_map.pbtxt')

    NUM_CLASSES = 90

    # Load the model
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)

            tf.import_graph_def(od_graph_def, name='')

    # Load the label map
    label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)
    print('Model loaded.')
    return detection_graph, category_index
Example #8
    def __displayDetectedImage(self, item):
        dcmFileName = item.data(0, Qt.UserRole)
        self.ui.statusBar.showMessage(dcmFileName)

        # Preprocessing
        dcmFile = pydicom.read_file(dcmFileName)
        origin = dcmFile.pixel_array  # type:numpy.ndarray

        intercept = dcmFile.RescaleIntercept
        slope = dcmFile.RescaleSlope
        origin = origin * slope + intercept
        origin[origin < -100] = -100  # clip low-intensity values
        origin[origin > 750] = 750  # clip high-intensity values

        # Normalize to the 0-255 range
        origin = cv2.normalize(origin,
                               None,
                               0,
                               255,
                               norm_type=cv2.NORM_MINMAX,
                               dtype=cv2.CV_8UC3)
        image_np = np.expand_dims(origin, -1)  # add a channel dimension to origin
        image_np = np.repeat(image_np, 3, 2)  # repeat the single channel three times along axis 2

        #plt.imsave("output/undetected.jpg", image_np)

        NUM_CLASSES = 6
        PATH_TO_LABELS = 'label_map.pbtxt'
        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        category_index = label_map_util.create_category_index(categories)

        result = item.data(1, Qt.UserRole)
        boxes = result[1]
        scores = result[2]
        classes = result[3]

        vis_util.visualize_boxes_and_labels_on_image_array(
            image_np,
            boxes,
            classes,
            scores,
            category_index,
            use_normalized_coordinates=True,
            line_thickness=1)

        #plt.imsave("output/detected.jpg", image_np)
        image = QImage(image_np, image_np.shape[1], image_np.shape[0],
                       QImage.Format_RGB888)

        self.curPixmap = QPixmap(image)
        self.on_actZoomFitH_triggered()

        self.ui.actZoomIn.setEnabled(True)
        self.ui.actZoomOut.setEnabled(True)
        self.ui.actZoomRealSize.setEnabled(True)
        self.ui.actZoomFitW.setEnabled(True)
        self.ui.actZoomFitH.setEnabled(True)
Example #9
    def __init__(self):

        self.saved_image_limit = 500
        self.saved_image_counter = 1
        self.save_images = rospy.get_param("~save_image", False)
        self.current_light = TrafficLight.UNKNOWN
        self.model_path = rospy.get_param('~model')
        self.readsize = 1024

        # Build the model
        self.detection_graph = tf.Graph()
        # create config
        config = tf.ConfigProto()

        # Reassemble the model from chunks (credit goes to team vulture for this idea)
        if not os.path.exists(self.model_path):
            if not os.path.exists(self.model_path+'/chunks'):
                output = open(self.model_path, 'wb')
                frozen_model_path = os.path.dirname(self.model_path)+'/frozen_model_chunks'
                chunks = os.listdir(frozen_model_path)
                chunks.sort()
                for filename in chunks:
                    filepath = os.path.join(frozen_model_path, filename)
                    with open(filepath, 'rb') as fileobj:
                        for chunk in iter(partial(fileobj.read, self.readsize), b''):
                            output.write(chunk)
                output.close()

        # Create the graph
        with self.detection_graph.as_default():
            graph_def = tf.GraphDef()
            with tf.gfile.GFile(self.model_path, 'rb') as fid:
                serialized_graph = fid.read()
                graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(graph_def, name='')
                rospy.loginfo('Loaded frozen tensorflow model: %s', self.model_path)

            # Create a reusable session attribute
            self.sess = tf.Session(graph=self.detection_graph, config=config)

        self.image_tensor = self.detection_graph.get_tensor_by_name('image_tensor:0')
        self.detection_boxes = self.detection_graph.get_tensor_by_name('detection_boxes:0')
        self.detection_scores = self.detection_graph.get_tensor_by_name('detection_scores:0')
        self.detection_classes = self.detection_graph.get_tensor_by_name('detection_classes:0')
        self.num_detections = self.detection_graph.get_tensor_by_name('num_detections:0')
        self.path = './light_classification/UTL_label_map.pbtxt'
        print(self.path)

        PATH_TO_LABELS = self.path
        NUM_CLASSES = 3

        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES,
                                                                    use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)
        self.count = 1
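A side note on the chunked model handling above: the frozen graph must have been split into ordered pieces beforehand. The helper below is a minimal sketch of how that could be done; the function name, chunk directory, and naming scheme are assumptions rather than part of the original project.

import os

def split_frozen_model(model_path, chunk_dir, chunk_size=1024):
    # Hypothetical helper (assumed): split a frozen .pb file into ordered chunks
    # that the reassembly loop above can concatenate back together. Zero-padded
    # names keep the chunks in the right order after chunks.sort().
    if not os.path.exists(chunk_dir):
        os.makedirs(chunk_dir)
    with open(model_path, 'rb') as src:
        index = 0
        while True:
            data = src.read(chunk_size)
            if not data:
                break
            with open(os.path.join(chunk_dir, 'chunk_{:04d}'.format(index)), 'wb') as dst:
                dst.write(data)
            index += 1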
Example #10
def main(_):
    # Step 1: set up the gRPC channel, stub, and prediction request
    options = [('grpc.max_send_message_length', 1000 * 1024 * 1024),
               ('grpc.max_receive_message_length', 1000 * 1024 * 1024)]
    channel = grpc.insecure_channel(FLAGS.server, options=options)
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
    request = predict_pb2.PredictRequest()
    request.model_spec.name = 'model'
    request.model_spec.signature_name = 'serving_default'

    # Step 2: prepare the input tensor
    img = cv2.imread(FLAGS.image)
    h, w = img.shape[:2]
    if compress:
        ratio = w / h
        h1 = height
        w1 = round(h1 * ratio)
        scaled_img = cv2.resize(img, (w1, h1), interpolation=cv2.INTER_AREA)
        tensor = tf.contrib.util.make_tensor_proto(scaled_img,
                                                   shape=[1] +
                                                   list(scaled_img.shape))
    else:
        tensor = tf.contrib.util.make_tensor_proto(img,
                                                   shape=[1] + list(img.shape))

    request.inputs['inputs'].CopyFrom(tensor)
    start = time.time()

    # Step 3: send the request and collect the results
    result_future = stub.Predict.future(request, 10.0)  # 10 secs timeout
    result = result_future.result()

    stop = time.time()
    print('time is ', stop - start)

    NUM_CLASSES = 30
    label_map = label_map_util.load_labelmap('annotations/label_map.pbtxt')
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)

    boxes = result.outputs['detection_boxes'].float_val
    classes = result.outputs['detection_classes'].float_val
    scores = result.outputs['detection_scores'].float_val

    result = vis_util.visualize_boxes_and_labels_on_image_array(
        img,
        np.reshape(boxes, [100, 4]),
        np.squeeze(classes).astype(np.int32),
        np.squeeze(scores),
        category_index,
        use_normalized_coordinates=True,
        line_thickness=8)

    cv2.imwrite('result.jpg', result)
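Usage note (an assumption based on the FLAGS referenced above): the client is pointed at a running TensorFlow Serving instance through the server flag and at an input file through the image flag, for example

    python client.py --server=localhost:8500 --image=test.jpg

where the script name, host/port, and image path are placeholders. The compress and height variables used for optional downscaling are module-level settings that are not shown in this snippet.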
Example #11
def get_labeled_image(image_path, path_to_labels, num_classes, boxes, classes,
                      scores):
    label_map = label_map_util.load_labelmap(path_to_labels)
    categories = label_map_util.convert_label_map_to_categories(
        label_map, max_num_classes=num_classes, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)
    image = Image.open(image_path)
    image_np = load_image_into_numpy_array(image)
    image_process = vis_util.visualize_boxes_and_labels_on_image_array(
        image_np, boxes, classes, scores, category_index)
    return image_process
Example #12
    def get_category_index(self):
        """ Transforms label map into category index for visualization.

            Returns:
                category_index: The category index corresponding to the given
                    label map.
        """
        label_map = label_map_util.load_labelmap(self.label_path)
        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=1, use_display_name=True)
        category_index = label_map_util.create_category_index(categories)
        return category_index
Example #13
def _init_category_index(label_map_path):
  """Creates category index from class indexes to name of the classes.

  Args:
    label_map_path: path to the mapping.
  Returns:
    A map for mapping int keys to string categories.
  """

  label_map = label_map_util.load_labelmap(label_map_path)
  num_classes = np.max([x.id for x in label_map.item])
  categories = label_map_util.convert_label_map_to_categories(
      label_map, max_num_classes=num_classes, use_display_name=True)
  category_index = label_map_util.create_category_index(categories)
  return category_index
Example #14
def main(unused_argv):
  assert FLAGS.checkpoint_dir, '`checkpoint_dir` is missing.'
  assert FLAGS.eval_dir, '`eval_dir` is missing.'
  tf.gfile.MakeDirs(FLAGS.eval_dir)
  if FLAGS.pipeline_config_path:
    configs = config_util.get_configs_from_pipeline_file(
        FLAGS.pipeline_config_path)
    tf.gfile.Copy(FLAGS.pipeline_config_path,
                  os.path.join(FLAGS.eval_dir, 'pipeline.config'),
                  overwrite=True)
  else:
    configs = config_util.get_configs_from_multiple_files(
        model_config_path=FLAGS.model_config_path,
        eval_config_path=FLAGS.eval_config_path,
        eval_input_config_path=FLAGS.input_config_path)
    for name, config in [('model.config', FLAGS.model_config_path),
                         ('eval.config', FLAGS.eval_config_path),
                         ('input.config', FLAGS.input_config_path)]:
      tf.gfile.Copy(config,
                    os.path.join(FLAGS.eval_dir, name),
                    overwrite=True)

  model_config = configs['model']
  eval_config = configs['eval_config']
  if FLAGS.eval_training_data:
    input_config = configs['train_input_config']
  else:
    input_config = configs['eval_input_config']

  model_fn = functools.partial(
      model_builder.build,
      model_config=model_config,
      is_training=False)

  create_input_dict_fn = functools.partial(
      input_reader_builder.build,
      input_config)

  label_map = label_map_util.load_labelmap(input_config.label_map_path)
  max_num_classes = max([item.id for item in label_map.item])
  categories = label_map_util.convert_label_map_to_categories(
      label_map, max_num_classes)

  if FLAGS.run_once:
    eval_config.max_evals = 1

  evaluator.evaluate(create_input_dict_fn, model_fn, eval_config, categories,
                     FLAGS.checkpoint_dir, FLAGS.eval_dir)
Example #15
    def load(self):
        config = tf.ConfigProto()
        config.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1

        self.detection_graph = tf.Graph()
        with self.detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        # Keep the session open for later use; a `with tf.Session(...)` block
        # would close it as soon as load() returns.
        self.session = tf.Session(graph=self.detection_graph, config=config)

        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)
Example #16
    def __init__(self, model_path, label_path):
        #TODO load classifier
        self.label_map = label_map_util.load_labelmap(label_path)
        self.categories = label_map_util.convert_label_map_to_categories(
            self.label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        self.category_index = label_map_util.create_category_index(
            self.categories)
        self.model_path = model_path

        self.detection_graph = None
        self.tf_session = None
        self.image_tensor = None
        self.detection_boxes = None
        self.detection_scores = None
        self.detection_classes = None
        self.num_detections = None
Example #17
    def __init__(self):
        self.modelpath = rospy.get_param('~model_path')
        # PATH_TO_MODEL = 'models/trial19_ssd_inception_sim_frozen_inference_graph.pb' # model resnet-udacity-sim-large-10-regions
        PATH_TO_MODEL = self.modelpath

        self.saved_image_limit = 500
        self.saved_image_counter = 1
        self.save_images = False

        self.readsize = 1024

        # Build the model
        self.detection_graph = tf.Graph()

        # Create the graph
        with self.detection_graph.as_default():
            graph_def = tf.GraphDef()
            with tf.gfile.GFile(PATH_TO_MODEL, 'rb') as fid:
                serialized_graph = fid.read()
                graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(graph_def, name='')

            self.image_tensor = self.detection_graph.get_tensor_by_name(
                'image_tensor:0')
            self.detection_boxes = self.detection_graph.get_tensor_by_name(
                'detection_boxes:0')
            self.detection_scores = self.detection_graph.get_tensor_by_name(
                'detection_scores:0')
            self.detection_classes = self.detection_graph.get_tensor_by_name(
                'detection_classes:0')
            self.num_detections = self.detection_graph.get_tensor_by_name(
                'num_detections:0')
            self.path = './light_classification/Conversion_label_map.pbtxt'

        # Create a reusable session attribute
        self.sess = tf.Session(graph=self.detection_graph)

        PATH_TO_LABELS = self.path
        NUM_CLASSES = 4

        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)
        self.count = 1
Example #18
    def __init__(self, num_class):
        threading.Thread.__init__(self)
        self.map = {1: "bottle", 2: "speaker", 3: "scissors"}
        self.current_Path = os.getcwd()
        self.Load_from_location = os.path.join(self.current_Path, 'images')
        self.Store_Search = store_Search(
            os.path.join(self.current_Path, 'static'))
        self.NUM_CLASSES = num_class
        self.PATH_TO_LABELS = os.path.join(self.current_Path, 'labelmap.pbtxt')
        self.label_map = label_map_util.load_labelmap(self.PATH_TO_LABELS)
        self.PATH_TO_CKPT = os.path.join(self.current_Path,
                                         'frozen_inference_graph.pb')
        self.categories = label_map_util.convert_label_map_to_categories(
            self.label_map,
            max_num_classes=self.NUM_CLASSES,
            use_display_name=True)
        self.category_index = label_map_util.create_category_index(
            self.categories)
Example #19
    def init_detector(self):

        # load graph and label map from default folder
        if self.graph_path is None:
            self.graph_path = './frozen_model/frozen_inference_graph.pb'
        if self.label_path is None:
            self.label_path = './annotations/label_map.pbtxt'

        # check existence of the two files
        if not os.path.exists(self.graph_path):
            raise IOError('Invalid detector_graph path! {}'.format(
                self.graph_path))
        if not os.path.exists(self.label_path):
            raise IOError('Invalid label path! {}'.format(self.label_path))

        # load frozen graph
        detection_graph = tf.Graph()
        with detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(self.graph_path, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        self.session = tf.Session(graph=detection_graph)

        # Prepare input and output tensor handles
        self.input = detection_graph.get_tensor_by_name('image_tensor:0')
        self.output.append(
            detection_graph.get_tensor_by_name('detection_boxes:0'))
        self.output.append(
            detection_graph.get_tensor_by_name('detection_scores:0'))
        self.output.append(
            detection_graph.get_tensor_by_name('detection_classes:0'))
        self.output.append(
            detection_graph.get_tensor_by_name('num_detections:0'))

        # Load label map
        label_map = label_map_util.load_labelmap(self.label_path)
        categories = label_map_util.convert_label_map_to_categories(
            label_map, max_num_classes=self.class_num, use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)