Example #1
  def testEndToEndSuccess(self):
    logging.info("Using testdata in %s", self.get_temp_dir())
    avg_model_dir = self._export_global_average_model()
    image_dir = self._write_cmy_dataset()
    saved_model_dir = os.path.join(self.get_temp_dir(), "final_saved_model")
    saved_model_expected_file = os.path.join(saved_model_dir, "saved_model.pb")
    tflite_output_file = os.path.join(self.get_temp_dir(), "final_model.tflite")
    labels_output_file = os.path.join(self.get_temp_dir(), "labels.txt")
    # Make sure we don't test for pre-existing files.
    self.assertFalse(os.path.isfile(saved_model_expected_file))
    self.assertFalse(os.path.isfile(tflite_output_file))
    self.assertFalse(os.path.isfile(labels_output_file))

    with flagsaver.flagsaver(
        image_dir=image_dir, tfhub_module=avg_model_dir,
        # This dataset is expected to be fit perfectly.
        assert_accuracy_at_least=0.9,
        saved_model_dir=saved_model_dir,
        tflite_output_file=tflite_output_file,
        labels_output_file=labels_output_file,
        **self.DEFAULT_FLAGS):
      make_image_classifier.main([])

    # Test that the SavedModel was written.
    self.assertTrue(os.path.isfile(saved_model_expected_file))

    # Test that the TFLite model works.
    labels = self._load_labels(labels_output_file)
    lite_model = self._load_lite_model(tflite_output_file)
    for class_name, rgb in self.CMY_NAMES_AND_RGB_VALUES:
      input_batch = (_fill_image(rgb, self.IMAGE_SIZE)[None, ...]
                     / np.array(255., dtype=np.float32))
      output_batch = lite_model(input_batch)
      prediction = labels[np.argmax(output_batch[0])]
      self.assertEqual(class_name, prediction)
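The test above calls two helpers that are not shown in these snippets, _fill_image and self._load_lite_model. A minimal sketch of what such helpers could look like, assuming the .tflite file takes a single float32 image batch and is driven directly through tf.lite.Interpreter (the bodies below are illustrative, not the library's actual test code):

import numpy as np
import tensorflow as tf

def _fill_image(rgb, image_size):
  """Returns an image_size x image_size x 3 uint8 array filled with one color."""
  return np.tile(np.array(rgb, dtype=np.uint8), (image_size, image_size, 1))

def _load_lite_model(tflite_path):
  """Wraps a .tflite file as a callable taking a float32 batch of images."""
  interpreter = tf.lite.Interpreter(model_path=tflite_path)
  interpreter.allocate_tensors()
  input_index = interpreter.get_input_details()[0]["index"]
  output_index = interpreter.get_output_details()[0]["index"]
  def lite_model(image_batch):
    interpreter.set_tensor(input_index, np.asarray(image_batch, dtype=np.float32))
    interpreter.invoke()
    return interpreter.get_tensor(output_index)
  return lite_model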
Example #2
  def testEndToEndAccuracyFailure(self):
    logging.info("Using testdata in %s", self.get_temp_dir())
    avg_model_dir = self._export_global_average_model()
    image_dir = self._write_random_dataset()

    with flagsaver.flagsaver(
        image_dir=image_dir, tfhub_module=avg_model_dir,
        # This is expected to fail for this random dataset.
        assert_accuracy_at_least=0.8, **self.DEFAULT_FLAGS):
      with self.assertRaisesRegex(AssertionError, "ACCURACY FAILED"):
        make_image_classifier.main([])
Example #3
    def testEndToEndAccuracyFailure(self, use_tf_data_input):
        if use_tf_data_input and (LooseVersion(tf.__version__) <
                                  LooseVersion("2.5.0")):
            return
        logging.info("Using testdata in %s", self.get_temp_dir())
        avg_model_dir = self._export_global_average_model()
        image_dir = self._write_random_dataset()

        with flagsaver.flagsaver(
                image_dir=image_dir,
                tfhub_module=avg_model_dir,
                # This is expected to fail for this random dataset.
                assert_accuracy_at_least=0.9,
                use_tf_data_input=use_tf_data_input,
                **self.DEFAULT_FLAGS):
            with self.assertRaisesRegex(AssertionError, "ACCURACY FAILED"):
                make_image_classifier.main([])
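The signature testEndToEndAccuracyFailure(self, use_tf_data_input) in the example above implies the test is parameterized, but the decorator is not shown. A sketch of how such a test can be declared with absl's parameterized test support, assuming absl.testing.parameterized drives the use_tf_data_input argument (the class name below is a placeholder, and LooseVersion, used by the version check above, comes from distutils.version):

from distutils.version import LooseVersion

from absl.testing import parameterized
import tensorflow as tf

class MakeImageClassifierTest(tf.test.TestCase, parameterized.TestCase):

  @parameterized.parameters((False,), (True,))
  def testEndToEndAccuracyFailure(self, use_tf_data_input):
    ...  # body as in the example above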
Example #4
  def testEndToEndSuccess(self):
    logging.info("Using testdata in %s", self.get_temp_dir())
    avg_model_dir = self._export_global_average_model()
    image_dir = self._write_cmy_dataset()
    saved_model_dir = os.path.join(self.get_temp_dir(), "final_saved_model")
    self.assertFalse(  # Make sure we don't test for pre-existing files.
        os.path.isfile(os.path.join(saved_model_dir, "saved_model.pb")))

    with flagsaver.flagsaver(
        image_dir=image_dir, tfhub_module=avg_model_dir,
        # This dataset is expected to be fit perfectly.
        assert_accuracy_at_least=0.9,
        saved_model_dir=saved_model_dir, **self.DEFAULT_FLAGS):
      make_image_classifier.main([])

    # Test for main output artifact.
    self.assertTrue(
        os.path.isfile(os.path.join(saved_model_dir, "saved_model.pb")))
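Example #4 only asserts that saved_model.pb exists. A quick follow-up sanity check could reload the artifact and run one batch through it, assuming the export is a Keras-style SavedModel with a float32 image input (the 224x224 input size and variable names below are assumptions, not taken from the test):

import numpy as np
import tensorflow as tf

# Reload the exported SavedModel and push one dummy batch through it.
model = tf.keras.models.load_model(saved_model_dir)  # saved_model_dir as in the test
dummy_batch = np.zeros((1, 224, 224, 3), dtype=np.float32)
probabilities = model(dummy_batch)
print(probabilities.shape)  # expected: (1, number_of_classes)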