Example #1 (score: 0)
    def test_debug_info(self):
        """Checks shape/size debug accessors on a classifier and an SSD model."""
        # MobileNet classifier: one output tensor of 1001 class scores.
        classifier = BasicEngine(
            test_utils.test_data_path('mobilenet_v1_1.0_224_quant.tflite'))
        shape = classifier.get_input_tensor_shape()
        self.assertListEqual([1, 224, 224, 3], shape.tolist())
        self.assertEqual(224 * 224 * 3, classifier.required_input_array_size())
        self.assertListEqual(
            [1001], classifier.get_all_output_tensors_sizes().tolist())
        self.assertEqual(1, classifier.get_num_of_output_tensors())
        self.assertEqual(1001, classifier.get_output_tensor_size(0))
        self.assertEqual(1001, classifier.total_output_array_size())

        # SSD detector: four output tensors (boxes, classes, scores, count).
        # NOTE(review): tensor meanings presumed from the model family — the
        # sizes below are what this test actually pins down.
        detector = BasicEngine(
            test_utils.test_data_path(
                'mobilenet_ssd_v1_coco_quant_postprocess.tflite'))
        self.assertListEqual(
            [1, 300, 300, 3], detector.get_input_tensor_shape().tolist())
        self.assertEqual(300 * 300 * 3, detector.required_input_array_size())
        expected_sizes = [80, 20, 20, 1]
        self.assertListEqual(
            expected_sizes, detector.get_all_output_tensors_sizes().tolist())
        self.assertEqual(
            len(expected_sizes), detector.get_num_of_output_tensors())
        for index, size in enumerate(expected_sizes):
            self.assertEqual(size, detector.get_output_tensor_size(index))
        self.assertEqual(
            sum(expected_sizes), detector.total_output_array_size())
def _GetOutputNumberClasses(model_path):
  """Gets the number of output classes of a single-output model.

  Args:
    model_path: string, path of the model.

  Returns:
    int, number of the output classes.

  Raises:
    ValueError: If the model has more than one output tensor.
  """
  engine = BasicEngine(model_path)
  num_output_tensors = engine.get_num_of_output_tensors()
  # A bare `assert` is stripped under `python -O`, silently skipping this
  # validation; raise explicitly so the invariant always holds.
  if num_output_tensors != 1:
    raise ValueError(
        'Model should have exactly 1 output tensor, got %d.' %
        num_output_tensors)
  return engine.total_output_array_size()
Example #3 (score: 0)
 def test_run_inference(self):
     """Runs inference on every test model and sanity-checks the outputs."""
     for model in test_utils.get_model_list():
         print('Testing model :', model)
         engine = BasicEngine(test_utils.test_data_path(model))
         random_input = test_utils.generate_random_input(
             1, engine.required_input_array_size())
         latency, result = engine.run_inference(random_input)
         self.assertEqual(result.size, engine.total_output_array_size())
         # The engine's reported inference time should agree with the
         # latency returned by run_inference.
         self.assertLess(math.fabs(engine.get_inference_time() - latency),
                         0.001)
         raw_output = engine.get_raw_output()
         self.assertEqual(result.size, raw_output.size)
         for value, raw_value in zip(result, raw_output):
             # NaN != NaN, so a pair of NaNs counts as a match and is skipped.
             if not (math.isnan(value) and math.isnan(raw_value)):
                 self.assertLess(math.fabs(value - raw_value), 0.001)
Example #4 (score: 0)
 def __init__(self, model_path, keep_classes=False):
     """
 Args:
   model_path (str): Path to the model you want to retrain. This model must be a ``.tflite``
     file output by the ``join_tflite_models`` tool. For more information about how to create a
     compatible model, read `Retrain an image classification model on-device
     <https://coral.ai/docs/edgetpu/retrain-classification-ondevice/>`_.
   keep_classes (bool): If True, keep the existing classes from the pre-trained model (and use
     training to add additional classes). If False, drop the existing classes and train the model
     to include new classes only.

 Raises:
   ValueError: If ``keep_classes`` is True but the base model does not have
     exactly one output tensor.
 """
     self._engine = ImprintingEnginePythonWrapper.CreateFromFile(
         model_path, keep_classes)
     # Number of classes the model currently outputs; stays 0 unless we
     # inherit the pre-trained model's classes below.
     self._num_classes = 0
     if keep_classes:
         base_engine = BasicEngine(model_path)
         num_output_tensors = base_engine.get_num_of_output_tensors()
         # A bare `assert` is stripped under `python -O`; validate
         # explicitly so a malformed model is always rejected.
         if num_output_tensors != 1:
             raise ValueError(
                 'Model should have exactly 1 output tensor, got %d.' %
                 num_output_tensors)
         self._num_classes = base_engine.total_output_array_size()