def test_examples(example_dir, artifacts):
    """Run every command from an example's README and verify its artifacts.

    Parameters:
        example_dir: Directory name of the example, relative to EXAMPLES_ROOT.
        artifacts: Iterable of artifact descriptors; each has a ``.name``
            (file expected in the example directory after the commands run)
            and an ``.infer`` flag (when truthy, the artifact is treated as an
            ONNX model and run through ``infer_model``).

    Each README command is executed via ``bash -c`` in the example directory
    and must exit with status 0. Every expected artifact must exist afterward;
    artifacts are removed once checked so reruns start clean.
    """
    example_dir = os.path.join(EXAMPLES_ROOT, example_dir)
    readme = os.path.join(example_dir, "README.md")
    commands = load_commands_from_readme(readme)

    # Merge with the current environment instead of replacing it: passing
    # env={"PYTHONPATH": ...} alone would drop PATH (and everything else)
    # inside the `bash -c` subshell, breaking the commands under test.
    env = dict(os.environ)
    env["PYTHONPATH"] = ROOT_DIR

    for command in commands:
        G_LOGGER.info(command)
        status = sp.run(["bash", "-c", command], cwd=example_dir, env=env)
        assert status.returncode == 0

    for artifact in artifacts:
        artifact_path = os.path.join(example_dir, artifact.name)
        assert os.path.exists(artifact_path)
        if artifact.infer:
            # infer_model returns the inference outputs; an empty/falsy
            # result means the model failed to produce anything useful.
            assert infer_model(artifact_path)
        os.remove(artifact_path)
def infer_model(path):
    """Load the ONNX model at ``path`` and run a single inference pass.

    Random input data is generated for each graph input using its declared
    shape and dtype (assumes all dimensions are concrete integers — dynamic
    dimensions would fail here; TODO confirm callers only pass fixed-shape
    models). Returns the list of output arrays from ONNX-Runtime.
    """
    model = onnx.load(path)
    graph = gs.import_onnx(model)

    # One random array per graph input, matching its declared shape/dtype.
    feed_dict = {
        inp.name: np.random.random_sample(size=inp.shape).astype(inp.dtype)
        for inp in graph.inputs
    }
    fetch_names = [out.name for out in graph.outputs]

    session = onnxruntime.InferenceSession(model.SerializeToString())
    results = session.run(fetch_names, feed_dict)
    G_LOGGER.info("Inference outputs: {:}".format(results))
    return results