Example 1
import onnxruntime as rt
from skl2onnx.helpers.onnx_helper import (
    load_onnx_model, save_onnx_model, select_model_inputs_outputs)


def print_specific_output(model_path,
                          input_tensor,
                          output_name,
                          print_tensor=False):
    # Truncate the graph so that ``output_name`` becomes the model output.
    model_onnx = load_onnx_model(model_path)
    num_onnx = select_model_inputs_outputs(model_onnx, output_name)
    save_onnx_model(num_onnx, "remove_temp.onnx")
    # Run the truncated model and report the intermediate tensor's shape.
    sess = rt.InferenceSession("remove_temp.onnx")
    out_tensor = sess.run(None, input_tensor)
    print("name", output_name, "shape", out_tensor[0].shape)
    if print_tensor:
        print(out_tensor[0])
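A minimal call site for the helper above, as a sketch: the model file, the input name, and the output name ("variable1") are hypothetical placeholders, not values from the original snippet.

import numpy

# Hypothetical model file, input name and output name; adjust to your graph.
feed = {"input": numpy.random.rand(1, 2).astype(numpy.float32)}
print_specific_output("model.onnx", feed, "variable1", print_tensor=True)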
Example 2
    def _modify_model_add_outputs_nodes(self, model_dir):
        old_onnx_model = onnx.load(self.args.model_path)
        utils.print_info_log("load model success")
        # Give every anonymous node a deterministic name.
        for index, node in enumerate(old_onnx_model.graph.node):
            if not node.name:
                node.name = node.op_type + "_" + str(index)
        # Expose every intermediate tensor as a model output.
        outputs_name = list(enumerate_model_node_outputs(old_onnx_model))
        new_onnx_model = select_model_inputs_outputs(old_onnx_model,
                                                     outputs_name)
        new_onnx_model_path = os.path.join(
            model_dir, "new_" + os.path.basename(self.args.model_path))
        save_onnx_model(new_onnx_model, new_onnx_model_path)
        utils.print_info_log("modify model outputs success")

        return old_onnx_model, new_onnx_model_path
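A sketch of how the return value might be consumed; `tool` (an instance of the class above) and the input dict are placeholder assumptions, not part of the original snippet.

import onnxruntime as rt

# Hypothetical caller: run the rewritten model and inspect every
# intermediate tensor it now exposes.
old_model, new_path = tool._modify_model_add_outputs_nodes("out_dir")
sess = rt.InferenceSession(new_path)
feed = {"input": input_array}  # placeholder: a dict matching the model inputs
for info, value in zip(sess.get_outputs(), sess.run(None, feed)):
    print(info.name, value.shape)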
Example 3
    def get_model(self, model):
        try:
            import onnxruntime  # noqa
        except ImportError:
            return None

        from onnxruntime import InferenceSession

        # With no filename, save_onnx_model returns the serialized bytes,
        # which InferenceSession accepts directly.
        session = InferenceSession(save_onnx_model(model))
        return lambda X: session.run(None, {"input": X})[0]
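The single-line construction works because `save_onnx_model` returns the serialized protobuf when called without a filename; a minimal equivalent sketch, assuming `model_onnx` holds a loaded ONNX model:

from onnxruntime import InferenceSession
from skl2onnx.helpers.onnx_helper import save_onnx_model

onnx_bytes = save_onnx_model(model_onnx)  # no filename: returns bytes
session = InferenceSession(onnx_bytes)
predict = lambda X: session.run(None, {"input": X})[0]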
Example 4
    def test_onnx_helper_load_save(self):
        model = make_pipeline(StandardScaler(), Binarizer(threshold=0.5))
        X = numpy.array([[0.1, 1.1], [0.2, 2.2]])
        model.fit(X)
        model_onnx = convert_sklearn(model, "binarizer",
                                     [("input", FloatTensorType([1, 2]))])
        filename = "temp_onnx_helper_load_save.onnx"
        save_onnx_model(model_onnx, filename)
        model = load_onnx_model(filename)
        # Keep only the tensor named "variable" as the model output.
        new_model = select_model_inputs_outputs(model, "variable")
        assert new_model.graph is not None

        # Both the full and the truncated model must still run.
        tr1 = self.get_model(model)
        tr2 = self.get_model(new_model)
        X = X.astype(numpy.float32)
        X1 = tr1(X)
        X2 = tr2(X)
        assert X1.shape == (2, 2)
        assert X2.shape == (2, 2)
Example 5
    def test_onnx_helper_load_save_init(self):
        # Note: scikit-learn >= 1.2 renames ``sparse`` to ``sparse_output``.
        model = make_pipeline(Binarizer(), OneHotEncoder(sparse=False),
                              StandardScaler())
        X = numpy.array([[0.1, 1.1], [0.2, 2.2], [0.4, 2.2], [0.2, 2.4]])
        model.fit(X)
        model_onnx = convert_sklearn(model, "pipe3",
                                     [("input", FloatTensorType([None, 2]))])
        filename = "temp_onnx_helper_load_save.onnx"
        save_onnx_model(model_onnx, filename)
        model = load_onnx_model(filename)
        new_model = select_model_inputs_outputs(model, "variable")
        assert new_model.graph is not None

        tr1 = self.get_model(model)
        tr2 = self.get_model(new_model)
        X = X.astype(numpy.float32)
        X1 = tr1(X)
        X2 = tr2(X)
        assert X1.shape == (4, 2)
        assert X2.shape == (4, 2)
Example 6
    def test_onnx_helper_load_save(self):
        model = make_pipeline(StandardScaler(), Binarizer(threshold=0.5))
        X = numpy.array([[0.1, 1.1], [0.2, 2.2]])
        model.fit(X)
        model_onnx = convert_sklearn(model, 'binarizer',
                                     [('input', FloatTensorType([1, 2]))])
        filename = "temp_onnx_helper_load_save.onnx"
        save_onnx_model(model_onnx, filename)
        model = load_onnx_model(filename)
        # Exercise the enumeration helper; the result is not needed here.
        list(enumerate_model_node_outputs(model))
        new_model = select_model_inputs_outputs(model, 'variable')
        self.assertTrue(new_model.graph is not None)

        tr1 = self.get_model(model)
        tr2 = self.get_model(new_model)
        X = X.astype(numpy.float32)
        X1 = tr1(X)
        X2 = tr2(X)
        self.assertEqual(X1.shape, (2, 2))
        self.assertEqual(X2.shape, (2, 2))
Example 7
    def test_onnx_helper_load_save_init(self):
        model = make_pipeline(Binarizer(), OneHotEncoder(sparse=False),
                              StandardScaler())
        X = numpy.array([[0.1, 1.1], [0.2, 2.2], [0.4, 2.2], [0.2, 2.4]])
        model.fit(X)
        model_onnx = convert_sklearn(model, 'pipe3',
                                     [('input', FloatTensorType([1, 2]))])
        filename = "temp_onnx_helper_load_save.onnx"
        save_onnx_model(model_onnx, filename)
        model = load_onnx_model(filename)
        list(enumerate_model_node_outputs(model))
        new_model = select_model_inputs_outputs(model, 'variable')
        self.assertTrue(new_model.graph is not None)  # pylint: disable=E1101

        tr1 = self.get_model(model)
        tr2 = self.get_model(new_model)
        X = X.astype(numpy.float32)
        X1 = tr1(X)
        X2 = tr2(X)
        self.assertEqual(X1.shape, (4, 2))
        self.assertEqual(X2.shape, (4, 2))
Example 8
# Let's first look at the list of intermediate outputs.

model_onnx = load_onnx_model("pipeline_titanic.onnx")
for out in enumerate_model_node_outputs(model_onnx):
    print(out)

################################
# It is not that easy to tell which output is which, as the *ONNX*
# graph contains more operators than the original *scikit-learn*
# pipeline. The graph at :ref:`l-plot-complex-pipeline-graph`
# helps us find the outputs of the numerical and the textual
# pipelines: *variable1* and *variable2*.
# Let's look into the numerical pipeline first.

num_onnx = select_model_inputs_outputs(model_onnx, 'variable1')
save_onnx_model(num_onnx, "pipeline_titanic_numerical.onnx")

################################
# Let's compute the numerical features.

sess = rt.InferenceSession("pipeline_titanic_numerical.onnx")
numX = sess.run(None, inputs)
print("numerical features", numX[0][:1])

###########################################
# We do the same for the textual features.

print(model_onnx)
text_onnx = select_model_inputs_outputs(model_onnx, 'variable2')
save_onnx_model(text_onnx, "pipeline_titanic_textual.onnx")
sess = rt.InferenceSession("pipeline_titanic_textual.onnx")
textX = sess.run(None, inputs)
print("textual features", textX[0][:1])
Example 9
    inputs.append(onnx.load_tensor(input_file))
"""

# For some models this string has to match the input to the model
inputDict = {"image": inputs[0]}

# Collect the names of every intermediate output in the graph.
outputs_list = list(enumerate_model_node_outputs(model_onnx))

print(outputs_list)

for idx, out in enumerate(outputs_list):
    name = str(idx) + "_" + out
    dataset = "test_data_set_0"
    os.makedirs(os.path.join(name, dataset))
    modelPath = os.path.join(name, name + ".onnx")
    # Truncate the graph so that ``out`` becomes the model output,
    # then run the truncated model on the same input.
    model_output = select_model_inputs_outputs(model_onnx, out)
    save_onnx_model(model_output, modelPath)
    sess = rt.InferenceSession(modelPath)
    numX = sess.run(None, inputDict)
    print()
    print("Generating idx=", idx)
    print(out)
    print(numX)

    # hardcoded for 1 output
    numpy_to_pb(out, numX[0], os.path.join(name, dataset, "output_0.pb"))
    numpy_to_pb(out, inputs[0], os.path.join(name, dataset, "input_0.pb"))
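To sanity-check a generated pair, the tensors can be read back with onnx.load_tensor; a minimal sketch reusing the directory layout created above:

from onnx import numpy_helper

# Load the first generated output tensor back and inspect its shape.
path = os.path.join("0_" + outputs_list[0], "test_data_set_0", "output_0.pb")
saved = onnx.load_tensor(path)
print(numpy_helper.to_array(saved).shape)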
Example 10
    def get_model(self, model):
        # save_onnx_model(model) with no filename returns the serialized
        # bytes, which InferenceSession loads directly.
        session = InferenceSession(save_onnx_model(model))
        return lambda X: session.run(None, {'input': X})[0]
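As in Example 3, the returned lambda acts as a one-call predictor. A hedged usage sketch, assumed to run inside the same test class, with a placeholder float32 batch (the "input" name must match the converted model):

import numpy

predict = self.get_model(new_model)  # new_model: any ONNX ModelProto
X = numpy.random.rand(4, 2).astype(numpy.float32)  # placeholder batch
print(predict(X).shape)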