def print_specific_output(model_path, input_tensor, output_name, print_tensor=False):
    # Truncate the model so that *output_name* becomes its final output,
    # run the truncated model with onnxruntime, and print the result's shape.
    model_onnx = load_onnx_model(model_path)
    num_onnx = select_model_inputs_outputs(model_onnx, output_name)
    save_onnx_model(num_onnx, "remove_temp.onnx")
    sess = rt.InferenceSession("remove_temp.onnx")
    out_tensor = sess.run(None, input_tensor)
    print("name", output_name, "shape", out_tensor[0].shape)
    if print_tensor:
        print(out_tensor[0])
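# A minimal usage sketch for print_specific_output. The file name
# "model.onnx", the input name "input" and the intermediate output
# name "variable" are hypothetical placeholders, not taken from the
# code above; onnxruntime expects inputs as a {name: array} dictionary.
import numpy
X = numpy.array([[0.1, 1.1]], dtype=numpy.float32)
print_specific_output("model.onnx", {"input": X}, "variable", print_tensor=True)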
def opsets(self):
    """
    Returns the opsets as dictionary ``{domain: opset}``.
    """
    if hasattr(self, 'onnxrt_'):
        model = self.onnxrt_.obj
    else:
        model = load_onnx_model(self.onnx_bytes)
    res = {}
    for oimp in model.opset_import:
        res[oimp.domain] = oimp.version
    return res
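# For reference, the same opset dictionary can be read directly from any
# loaded ONNX model through its *opset_import* field; "model.onnx" is a
# placeholder path. The empty domain '' denotes the default ONNX domain.
model = load_onnx_model("model.onnx")
print({oimp.domain: oimp.version for oimp in model.opset_import})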
def test_onnx_helper_load_save(self):
    model = make_pipeline(StandardScaler(), Binarizer(threshold=0.5))
    X = numpy.array([[0.1, 1.1], [0.2, 2.2]])
    model.fit(X)
    model_onnx = convert_sklearn(
        model, "binarizer", [("input", FloatTensorType([1, 2]))])
    filename = "temp_onnx_helper_load_save.onnx"
    save_onnx_model(model_onnx, filename)
    model = load_onnx_model(filename)
    new_model = select_model_inputs_outputs(model, "variable")
    assert new_model.graph is not None
    tr1 = self.get_model(model)
    tr2 = self.get_model(new_model)
    X = X.astype(numpy.float32)
    X1 = tr1(X)
    X2 = tr2(X)
    assert X1.shape == (2, 2)
    assert X2.shape == (2, 2)
def test_onnx_helper_load_save_init(self):
    model = make_pipeline(
        Binarizer(), OneHotEncoder(sparse=False), StandardScaler())
    X = numpy.array([[0.1, 1.1], [0.2, 2.2], [0.4, 2.2], [0.2, 2.4]])
    model.fit(X)
    model_onnx = convert_sklearn(
        model, "pipe3", [("input", FloatTensorType([None, 2]))])
    filename = "temp_onnx_helper_load_save.onnx"
    save_onnx_model(model_onnx, filename)
    model = load_onnx_model(filename)
    new_model = select_model_inputs_outputs(model, "variable")
    assert new_model.graph is not None
    tr1 = self.get_model(model)
    tr2 = self.get_model(new_model)
    X = X.astype(numpy.float32)
    X1 = tr1(X)
    X2 = tr2(X)
    assert X1.shape == (4, 2)
    assert X2.shape == (4, 2)
def test_onnx_helper_load_save(self):
    model = make_pipeline(StandardScaler(), Binarizer(threshold=0.5))
    X = numpy.array([[0.1, 1.1], [0.2, 2.2]])
    model.fit(X)
    model_onnx = convert_sklearn(
        model, 'binarizer', [('input', FloatTensorType([1, 2]))])
    filename = "temp_onnx_helper_load_save.onnx"
    save_onnx_model(model_onnx, filename)
    model = load_onnx_model(filename)
    list(enumerate_model_node_outputs(model))
    new_model = select_model_inputs_outputs(model, 'variable')
    self.assertTrue(new_model.graph is not None)
    tr1 = self.get_model(model)
    tr2 = self.get_model(new_model)
    X = X.astype(numpy.float32)
    X1 = tr1(X)
    X2 = tr2(X)
    self.assertEqual(X1.shape, (2, 2))
    self.assertEqual(X2.shape, (2, 2))
def test_onnx_helper_load_save_init(self):
    model = make_pipeline(
        Binarizer(), OneHotEncoder(sparse=False), StandardScaler())
    X = numpy.array([[0.1, 1.1], [0.2, 2.2], [0.4, 2.2], [0.2, 2.4]])
    model.fit(X)
    model_onnx = convert_sklearn(
        model, 'pipe3', [('input', FloatTensorType([1, 2]))])
    filename = "temp_onnx_helper_load_save.onnx"
    save_onnx_model(model_onnx, filename)
    model = load_onnx_model(filename)
    list(enumerate_model_node_outputs(model))
    new_model = select_model_inputs_outputs(model, 'variable')
    self.assertTrue(new_model.graph is not None)  # pylint: disable=E1101
    tr1 = self.get_model(model)
    tr2 = self.get_model(new_model)
    X = X.astype(numpy.float32)
    X1 = tr1(X)
    X2 = tr2(X)
    self.assertEqual(X1.shape, (4, 2))
    self.assertEqual(X2.shape, (4, 2))
def enumerate_create(onnx_bytes, output_names=None, enforce_float32=True):
    """
    Creates multiple *OnnxTransformer*,
    one for each requested intermediate node.

    :param onnx_bytes: bytes
    :param output_names: string, requested output names, or None to
        request them all and let method *transform* store all of them
        in a dataframe
    :param enforce_float32: boolean, :epkg:`onnxruntime` only supports
        *float32*, :epkg:`scikit-learn` usually uses double floats, this
        parameter ensures that every array of double floats is converted
        into single floats
    :return: iterator on OnnxTransformer *('output name', OnnxTransformer)*
    """
    selected = None if output_names is None else set(output_names)
    model = load_onnx_model(onnx_bytes)
    for out in enumerate_model_node_outputs(model):
        if selected is None or out in selected:
            # Truncate the graph at this intermediate output only when
            # it was actually requested.
            m = select_model_inputs_outputs(model, out)
            tr = OnnxTransformer(m.SerializeToString(),
                                 enforce_float32=enforce_float32)
            yield out, tr
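# A hedged usage sketch for enumerate_create: build one transformer per
# intermediate output and apply each of them. *model_onnx_bytes* and *X*
# are placeholders for a serialized ONNX model and a float32 numpy array,
# and the snippet assumes OnnxTransformer follows the usual scikit-learn
# fit/transform API.
for name, tr in enumerate_create(model_onnx_bytes):
    print(name, tr.fit(X).transform(X).shape)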
sess = rt.InferenceSession("pipeline_titanic.onnx")
pred_onx = sess.run(None, inputs)
print("predict", pred_onx[0][:5])
print("predict_proba", pred_onx[1][:1])

####################################
# Compute intermediate outputs
# ++++++++++++++++++++++++++++
#
# Unfortunately, there is actually no way to ask
# *onnxruntime* to retrieve the output of intermediate nodes.
# We need to modify the *ONNX* model before it is given to *onnxruntime*.
# Let's first look at the list of intermediate outputs.

model_onnx = load_onnx_model("pipeline_titanic.onnx")
for out in enumerate_model_node_outputs(model_onnx):
    print(out)

################################
# It is not that easy to tell which output is which, as the *ONNX*
# graph has more operators than the original *scikit-learn* pipeline.
# The graph at :ref:`l-plot-complex-pipeline-graph`
# helps us find the outputs of both the numerical
# and the textual pipelines: *variable1*, *variable2*.
# Let's look into the numerical pipeline first.

num_onnx = select_model_inputs_outputs(model_onnx, 'variable1')
save_onnx_model(num_onnx, "pipeline_titanic_numerical.onnx")

################################
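# A plausible next step, not part of the excerpt above: run the truncated
# model to inspect the numerical pipeline's output, reusing the same
# *inputs* dictionary as before.

sess_num = rt.InferenceSession("pipeline_titanic_numerical.onnx")
numerical_output = sess_num.run(None, inputs)
print("numerical pipeline output", numerical_output[0][:2])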
def print_model_outputs(model_path, input_tensor, print_tensor=False):
    # Print the shape (and optionally the values) of every intermediate
    # output of the model.
    model_onnx = load_onnx_model(model_path)
    for out in enumerate_model_node_outputs(model_onnx):
        print_specific_output(model_path, input_tensor, out, print_tensor)
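# Example call for print_model_outputs, with the same hypothetical file
# and input names as earlier ("model.onnx", "input"); it prints the
# shape of every intermediate output in one pass.
print_model_outputs("model.onnx", {"input": X})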