# Example n. 1
print("output names:", [o.name for o in sess.get_outputs()])
res = sess.run(None, {'X': X_test[:2]})
print("outputs")
pprint.pprint(res)

# Function *select_model_inputs_outputs* can also promote an intermediate
# result to an output, as sketched below.
#
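# A minimal sketch of that promotion follows. It assumes the function is
# imported from *skl2onnx.helpers.onnx_helper* and that the graph contains
# an intermediate result named 'variable1'; both names are assumptions and
# should be adapted to the actual model.

from skl2onnx.helpers.onnx_helper import select_model_inputs_outputs

# Keep the same graph but expose the intermediate result as a model output.
intermediate_onx = select_model_inputs_outputs(simple_onx, outputs=["variable1"])

sess_inter = InferenceSession(intermediate_onx.SerializeToString(),
                              providers=['CPUExecutionProvider'])
print("intermediate output names:",
      [o.name for o in sess_inter.get_outputs()])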
#####################################
# This example only uses the ONNX graph in memory and never saves or loads a
# model. That can be done with the following snippets of code.
#
# Save a model
# ++++++++++++

with open("simplified_model.onnx", "wb") as f:
    f.write(simple_onx.SerializeToString())

###################################
# Load a model
# ++++++++++++

model = load("simplified_model.onnx")

sess = InferenceSession(model.SerializeToString(),
                        providers=['CPUExecutionProvider'])
print("output names:", [o.name for o in sess.get_outputs()])
res = sess.run(None, {'X': X_test[:2]})
print("outputs")
pprint.pprint(res)