Example no. 1
 def test_invalid_format_invalid_extension(self):
     _, invalid_filename = tempfile.mkstemp(
         suffix=".invalid", prefix=self.saved_model_dir
     )
     with pytest.raises(NotImplementedError) as e:
         converter.convert(invalid_filename, source="tensorflow")
     e.match(r"Expected model format: .* .pb")
Example no. 2
 def test_invalid_converter_target(self):
     with tf.Graph().as_default() as graph:
         x = tf.placeholder(tf.float32, shape=(3, 4, 5))
         out = tf.nn.relu(x)
     with pytest.raises(NotImplementedError) as e:
         converter.convert(graph, convert_to="invalid", source="tensorflow")
     e.match(r"Backend converter .* not implemented")
Example no. 3
    def test_invalid_converter_type(self):
        with pytest.raises(ValueError) as e:
            converter.convert(None, source="invalid")

        expected_msg = r'Unrecognized value of argument "source": .*'
        e.match(expected_msg)

        with pytest.raises(NotImplementedError) as e:
            converter.convert(None, convert_to="invalid", source=frontend)
        e.match(r"Backend converter .* not implemented")
Example no. 4
    def test_invalid_converter_minimum_deployment_flag(self):
        with pytest.raises(TypeError) as e:
            converter.convert(None,
                              source="tensorflow",
                              minimum_deployment_target="iOs14")
        expected_msg = (
            "Unrecognized value of argument 'minimum_deployment_target': iOs14. "
            "It needs to be a member of 'coremltools.target' enumeration")

        e.match(expected_msg)
Example no. 5
    def test_valid_deployment_target_cumsum(self, target):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.math.cumsum(x, axis=-1, reverse=False, exclusive=False)

        model, inputs, outputs = build_model

        # successful conversion
        converter.convert(model, minimum_deployment_target=target)
Example no. 6
    def test_invalid_output_names(self):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model
        with pytest.raises(AssertionError) as e:
            converter.convert(model, source=frontend, outputs=["invalid_name"])
        e.match(r".* is not in graph")
Example no. 7
    def test_invalid_format_multiple_concrete_functions(self):
        class build_model(tf.Module):
            @tf.function(input_signature=[
                tf.TensorSpec(shape=[3, 4, 5], dtype=tf.float32)
            ])
            def __call__(self, x):
                return tf.nn.relu(x)

        model = build_model()
        cf = model.__call__.get_concrete_function()
        with pytest.raises(NotImplementedError) as e:
            converter.convert([cf, cf, cf], source=frontend)
        e.match(r"Only a single concrete function is supported")
Example no. 8
    def test_invalid_converter_type(self):
        keras_model = tf.keras.Sequential(
            [tf.keras.layers.ReLU(input_shape=(4, 5), batch_size=3)])
        with pytest.raises(ValueError) as e:
            converter.convert(keras_model, source="invalid")

        expected_msg = r'Unrecognized value of argument "source": .*'
        e.match(expected_msg)

        with pytest.raises(NotImplementedError) as e:
            converter.convert(keras_model,
                              convert_to="invalid",
                              source=frontend)
        e.match(r"Backend converter .* not implemented")
Example no. 9
    def test_missing_placeholder_shape(self):
        x_shape = None  # Missing Placeholder shape

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model
        with pytest.raises(ValueError) as e:
            converter.convert(model, source=frontend)
        e.match(r"Unable to determine the shape of input .*")

        # The test should pass if the user provides a shape during conversion.
        mlmodel = converter.convert(model, source=frontend, inputs=[ct.TensorType(shape=())])
        assert mlmodel is not None
Example no. 10
    def test_invalid_deployment_target_cumsum(self, target):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.math.cumsum(x, axis=-1, reverse=False, exclusive=False)

        model, inputs, outputs = build_model

        with pytest.raises(ValueError) as e:
            converter.convert(model, minimum_deployment_target=target)
        e.match(
            r"Provided minimum deployment target .* version 4 but converted model "
            r"uses .* available from version 5 onwards.\n    1. Cumsum operation\n"
        )
Example no. 11
    def test_invalid_input_names(self):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model

        with pytest.raises(ValueError) as e:
            converter.convert(model,
                              inputs=[TensorType("invalid_name", x_shape)])
        e.match(
            r"Input \(invalid_name\) provided is not found in given tensorflow graph. Placeholders in graph are: .*"
        )
Example no. 12
 def test_graph_def(self):
     with tf.Graph().as_default() as graph:
         x = tf.placeholder(tf.float32, shape=(3, 4, 5))
         out = tf.nn.relu(x)
     mlmodel = converter.convert(
         graph, inputs=[TensorType(x.op.name, (3, 4, 5))], outputs=[out.op.name]
     )
     assert mlmodel is not None
Example no. 13
 def test_saved_model_from_simple_save(self):
     with tf.compat.v1.Session() as sess:
         x = tf.placeholder(shape=(1, 3, 5), dtype=tf.float32)
         y = tf.nn.relu(x)
         inputs = {"x": x}
         outputs = {"y": y}
         tf.compat.v1.saved_model.simple_save(sess, self.saved_model_dir,
                                              inputs, outputs)
     mlmodel = converter.convert(self.saved_model_dir)
     assert mlmodel is not None
Example no. 14
 def test_keras_saved_model_file(self):
     keras_model = tf.keras.Sequential([
         tf.keras.layers.Flatten(input_shape=(28, 28), batch_size=1),
         tf.keras.layers.Dense(10, activation=tf.nn.relu),
     ])
     keras_model.save(self.saved_model_dir, save_format="tf")
     mlmodel = converter.convert(self.saved_model_dir,
                                 outputs="Identity",
                                 source=frontend)
     assert mlmodel is not None
Example no. 15
def run_compare_tf_keras(
    model,
    input_values,
    use_cpu_only=False,
    frontend_only=False,
    frontend="tensorflow",
    backend="nn_proto",
    atol=1e-04,
    rtol=1e-05,
):
    """
    Parameters
    ----------
    model: TensorFlow 2.x model
        TensorFlow 2.x model annotated with @tf.function.
    input_values: list of np.array
        List of input values in the same order as the input signature.
    use_cpu_only: bool
        If true, use CPU only for prediction, otherwise, use GPU also.
    frontend_only: bool
        If true, skip the prediction call, only validate conversion.
    frontend: str
        Frontend to convert from.
    backend: str
        Backend to convert to.
    atol: float
        The absolute tolerance parameter.
    rtol: float
        The relative tolerance parameter.
    """

    proto = convert(model, source=frontend, convert_to=backend).get_spec()

    # assumes the conversion preserves the I/O names
    inputs = sorted([str(i.name) for i in proto.description.input])
    outputs = [str(o.name) for o in proto.description.output]

    if frontend_only:
        return

    # get the tf.keras model output as reference and run the comparison
    ref = [model(input_values).numpy()]
    expected_outputs = {n: v for n, v in zip(outputs, ref)}
    input_key_values = {n: v for n, v in zip(inputs, input_values)}
    compare_backend(
        proto,
        input_key_values,
        expected_outputs,
        use_cpu_only,
        atol=atol,
        rtol=rtol,
        also_compare_shapes=True,
    )

    return proto
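
A minimal usage sketch for the helper above (not part of the original suite; the tiny tf.keras model, the NumPy input, and the backend string are illustrative assumptions):

import numpy as np
import tensorflow as tf

# Hypothetical example: a single-ReLU tf.keras model with a fixed batch size,
# compared against its Core ML conversion via run_compare_tf_keras above.
keras_model = tf.keras.Sequential(
    [tf.keras.layers.ReLU(input_shape=(4, 5), batch_size=3)])
input_values = [np.random.rand(3, 4, 5).astype(np.float32)]
proto = run_compare_tf_keras(keras_model, input_values, backend="nn_proto")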
Example no. 16
    def test_auto_image_nchw_input_names(self):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model

        mlmodel = converter.convert(model, inputs=[ImageType(channel_first=True)])
        assert mlmodel is not None
Example no. 17
 def test_keras_model(self):
     keras_model = tf.keras.Sequential(
         [tf.keras.layers.ReLU(input_shape=(4, 5), batch_size=3)])
     input_names, output_names = get_tf_keras_io_names(keras_model)
     mlmodel = converter.convert(
         keras_model,
         inputs=[TensorType(input_names[0], (3, 4, 5))],
         outputs=["Identity"],
         source=frontend,
     )
     assert mlmodel is not None
Example no. 18
    def test_auto_image_nhwc_input_names(self):
        x_shape = (4, 5, 3)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model

        mlmodel = converter.convert(model, inputs=[ImageType()])
        assert mlmodel is not None
Example no. 19
 def test_model_metadata(self):
     with tf.Graph().as_default() as graph:
         x = tf.placeholder(tf.float32, shape=(3, 4, 5))
         out = tf.nn.relu(x)
     mlmodel = converter.convert(
         graph, inputs=[TensorType(x.op.name, (3, 4, 5))], outputs=[out.op.name]
     )
     metadata_keys = mlmodel.get_spec().description.metadata.userDefined
     assert "com.github.apple.coremltools.version" in metadata_keys
     assert "com.github.apple.coremltools.source" in metadata_keys
     assert "tensorflow==1." in metadata_keys["com.github.apple.coremltools.source"]
Example no. 20
    def test_auto_image_nhwc_input_names(self):
        x_shape = (4, 5, 3)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model

        mlmodel = converter.convert(model, inputs=[ImageType()])
        if mlmodel is None:
            raise AssertionError
Example no. 21
 def test_keras_h5_file(self):
     keras_model = tf.keras.Sequential(
         [tf.keras.layers.ReLU(input_shape=(4, 5), batch_size=3)])
     input_names, output_names = get_tf_keras_io_names(keras_model)
     keras_model.save(self.model_path_h5, save_format="h5")
     mlmodel = converter.convert(
         self.model_path_h5,
         inputs=[TensorType(input_names[0], (3, 4, 5))],
         outputs=["Identity"],
         source=frontend,
     )
     if mlmodel is None:
         raise AssertionError
Example no. 22
 def test_graph_def_file(self):
     with tf.Graph().as_default() as graph:
         x = tf.placeholder(tf.float32, shape=(3, 4, 5))
         out = tf.nn.relu(x)
     tf.io.write_graph(
         graph, self.saved_model_dir, self.model_path_pb, as_text=False
     )
     mlmodel = converter.convert(
         self.model_path_pb,
         inputs=[TensorType(x.op.name, (3, 4, 5))],
         outputs=[out.op.name],
     )
     assert mlmodel is not None
Example no. 23
    def test_infer_inputs_and_outputs(self):
        x_shape = (3, 4, 5)

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model
        mlmodel = converter.convert(model)
        assert mlmodel is not None

        input_values = [random_gen(x_shape, -10.0, 10.0)]
        input_dict = dict(zip(inputs, input_values))
        run_compare_tf(model, input_dict, outputs)
Example no. 24
    def test_scalar_placeholder_shape(self):
        x_shape = ()  # Scalar Placeholder Shape

        @make_tf_graph([x_shape])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model
        mlmodel = converter.convert(model, source=frontend)
        assert mlmodel is not None

        input_values = [random_gen(x_shape, -10.0, 10.0)]
        input_dict = dict(zip(inputs, input_values))
        run_compare_tf(model, input_dict, outputs)
Example no. 25
    def test_concrete_function_list_from_tf_function(self):
        class build_model(tf.Module):
            @tf.function(input_signature=[
                tf.TensorSpec(shape=[3, 4, 5], dtype=tf.float32)
            ])
            def __call__(self, x):
                return tf.nn.relu(x)

        model = build_model()
        concrete_func = model.__call__.get_concrete_function()
        mlmodel = converter.convert([concrete_func],
                                    outputs=["Identity"],
                                    source=frontend)
        assert mlmodel is not None
Example no. 26
    def test_extract_sub_model(self):
        x_shape = (3, 4, 5)
        y_shape = (3, 4, 5)

        @make_tf_graph([x_shape, y_shape])
        def build_model(x, y):
            return tf.nn.relu(x), tf.math.add(x, y)

        model, inputs, outputs = build_model
        if isinstance(outputs[0], six.string_types):
            first_output_name = outputs[0]
        else:
            first_output_name = outputs[0].name.split(":")[0]
        mlmodel = converter.convert(model, outputs=[first_output_name])
        assert mlmodel is not None
Example no. 27
    def test_saved_model_list_from_tf_function(self):
        class build_model(tf.Module):
            @tf.function(input_signature=[
                tf.TensorSpec(shape=[3, 4, 5], dtype=tf.float32)
            ])
            def __call__(self, x):
                return tf.nn.relu(x)

        model = build_model()
        tf.saved_model.save(model, self.saved_model_dir)
        mlmodel = converter.convert(self.saved_model_dir,
                                    outputs=["Identity"],
                                    source=frontend)
        if mlmodel is None:
            raise AssertionError
Example no. 28
def convert_to_mlmodel(model_spec, tensor_inputs):
    def _convert_to_inputtype(inputs):
        if isinstance(inputs, list):
            return [_convert_to_inputtype(x) for x in inputs]
        elif isinstance(inputs, tuple):
            return tuple([_convert_to_inputtype(x) for x in inputs])
        elif isinstance(inputs, torch.Tensor):
            return TensorType(shape=inputs.shape)
        else:
            raise ValueError("Unable to parse type {} into InputType.".format(
                type(inputs)))

    mlmodel = convert(model_spec,
                      inputs=list(_convert_to_inputtype(tensor_inputs)))
    return mlmodel
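
A hedged usage sketch for convert_to_mlmodel above; the SmallNet module and its example input are hypothetical and assume torch plus the convert function used above are in scope:

import torch

# Hypothetical model: trace a trivial torch module and hand the traced graph
# plus its example inputs to convert_to_mlmodel, which derives TensorType
# shapes from the tensors.
class SmallNet(torch.nn.Module):
    def forward(self, x):
        return torch.relu(x)

example_input = torch.rand(3, 4, 5)
traced_model = torch.jit.trace(SmallNet().eval(), example_input)
mlmodel = convert_to_mlmodel(traced_model, (example_input,))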
Example no. 29
    def test_default_data_types(self):
        @make_tf_graph([(2, 2)])
        def build_model(x):
            return tf.nn.relu(x)

        model, inputs, outputs = build_model
        mlmodel = converter.convert(model)
        assert mlmodel is not None
        spec = mlmodel.get_spec()

        # Defaults should be FLOAT32 instead of DOUBLE
        it = spec.description.input[0].type.multiArrayType.dataType
        assert it == ft.ArrayFeatureType.ArrayDataType.Value("FLOAT32")
        ot = spec.description.output[0].type.multiArrayType.dataType
        assert ot == ft.ArrayFeatureType.ArrayDataType.Value("FLOAT32")
Example no. 30
 def test_model_metadata(self):
     keras_model = tf.keras.Sequential(
         [tf.keras.layers.ReLU(input_shape=(4, 5), batch_size=3)])
     input_names, output_names = get_tf_keras_io_names(keras_model)
     mlmodel = converter.convert(
         keras_model,
         inputs=[TensorType(input_names[0], (3, 4, 5))],
         outputs=["Identity"],
         source=frontend,
     )
     metadata_keys = mlmodel.get_spec().description.metadata.userDefined
     assert "com.github.apple.coremltools.version" in metadata_keys
     assert "com.github.apple.coremltools.source" in metadata_keys
     assert "tensorflow==2." in metadata_keys[
         "com.github.apple.coremltools.source"]