def testInvalidConstructor(self):
    """Constructor raises ValueError when tensors are None but the array
    arguments (`input_arrays_with_shape` / `output_arrays`) are incomplete."""
    expected_message = ('If input_tensors and output_tensors are None, both '
                        'input_arrays_with_shape and output_arrays must be defined.')

    # Only `input_arrays_with_shape` supplied; `output_arrays` missing.
    with self.assertRaises(ValueError) as ctx:
      lite.TFLiteConverter(
          None, None, [], input_arrays_with_shape=[('input', [3, 9])])
    self.assertEqual(expected_message, str(ctx.exception))

    # Only `output_arrays` supplied; `input_arrays_with_shape` missing.
    with self.assertRaises(ValueError) as ctx:
      lite.TFLiteConverter(None, [], None, output_arrays=['output'])
    self.assertEqual(expected_message, str(ctx.exception))
  def testValidConstructor(self):
    """A converter built from named arrays (no live tensors) is valid but
    cannot have its batch size changed."""
    converter = lite.TFLiteConverter(
        None, None, None,
        input_arrays_with_shape=[('input', [3, 9])],
        output_arrays=['output'])
    # Array-based construction means there are no tensor objects to inspect.
    self.assertFalse(converter._has_valid_tensors())
    self.assertEqual(converter.get_input_arrays(), ['input'])

    # Without real input tensors the batch size cannot be overridden.
    with self.assertRaises(ValueError) as ctx:
      converter._set_batch_size(1)
    self.assertEqual(
        'The batch size cannot be set for this model. Please use '
        'input_shapes parameter.', str(ctx.exception))

    # Tensor-based construction, by contrast, does yield valid tensors.
    converter = lite.TFLiteConverter(None, ['input_tensor'], ['output_tensor'])
    self.assertTrue(converter._has_valid_tensors())
# Example #3
  def test_conversion_from_constructor_success(self):
    """On a successful conversion the metrics object records the attempt,
    the success, and the converter parameters."""
    frozen_graph_def = self._constructGraphDef()

    converter = lite.TFLiteConverter(
        frozen_graph_def, None, None,
        [('in_tensor', [2, 16, 16, 3])], ['add'])
    # Swap the real metrics object for an autospec mock so the calls made
    # during convert() can be observed.
    mock_metrics = mock.create_autospec(metrics.TFLiteMetrics, instance=True)
    converter._tflite_metrics = mock_metrics

    tflite_model = converter.convert()
    self.assertIsNotNone(tflite_model)

    expected_calls = [
        mock.call.increase_counter_converter_attempt(),
        mock.call.increase_counter_converter_success(),
        mock.call.set_converter_param('input_format', '1'),
        mock.call.set_converter_param('enable_mlir_converter', 'True'),
        mock.call.set_converter_param('allow_custom_ops', 'False'),
    ]
    mock_metrics.assert_has_calls(expected_calls, any_order=True)
# Example #4
  def test_conversion_from_constructor_fail(self):
    """On a failed conversion the metrics object records the attempt and
    parameters, but the success counter is never incremented."""
    frozen_graph_def = self._constructGraphDef()

    # 'wrong_tensor' does not exist in the graph, so convert() must fail.
    converter = lite.TFLiteConverter(
        frozen_graph_def, None, None,
        [('wrong_tensor', [2, 16, 16, 3])], ['add'])
    # Swap the real metrics object for an autospec mock so the calls made
    # during convert() can be observed.
    mock_metrics = mock.create_autospec(metrics.TFLiteMetrics, instance=True)
    converter._tflite_metrics = mock_metrics

    with self.assertRaises(ConverterError):
      converter.convert()

    expected_calls = [
        mock.call.increase_counter_converter_attempt(),
        mock.call.set_converter_param('output_format', '2'),
        mock.call.set_converter_param('select_user_tf_ops', 'set()'),
        mock.call.set_converter_param('post_training_quantize', 'False'),
    ]
    mock_metrics.assert_has_calls(expected_calls, any_order=True)
    mock_metrics.increase_counter_converter_success.assert_not_called()