Example #1
def run_onnxmsrtnext(onnx_graph, inputs, output_names, test_name):
    """Run test against msrt-next backend."""
    import lotus
    # Serialize the ONNX graph to a protobuf file in the temp directory.
    model_path = os.path.join(TMPPATH, test_name + ".pb")
    with open(model_path, "wb") as f:
        f.write(onnx_graph.SerializeToString())
    # Load the model and run inference for the requested output names.
    m = lotus.InferenceSession(model_path)
    results = m.run(output_names, inputs)
    return results[0]
Example #2
    def run_onnxmsrtnext(self, onnx_graph, inputs, output_names, test_name):
        """Run test against msrt-next backend."""
        import lotus
        model_path = os.path.join(type(self).TMPPATH, test_name + ".onnx")
        self.log.debug("create model file: %s", model_path)
        with open(model_path, "wb") as f:
            f.write(onnx_graph.SerializeToString())

        m = lotus.InferenceSession(model_path)
        results = m.run(output_names, inputs)
        return results
Example #3
def run_onnxmsrtnext(self, name, model_proto, inputs):
    """Run test against msrt-next backend."""
    import lotus
    # Save the model proto to disk via the shared test helper, then load it.
    model_path = utils.save_onnx_model(TMPPATH, name, inputs, model_proto)
    m = lotus.InferenceSession(model_path)
    results = m.run(self.output_names, inputs)
    if self.perf:
        # Optional performance measurement: time PERFITER repeated runs.
        start = time.time()
        for _ in range(PERFITER):
            _ = m.run(self.output_names, inputs)
        self.onnx_runtime = time.time() - start
    return results
Example #4
def run_onnxmsrtnext(self, name, model_proto, inputs):
    """Run test against msrt-next backend."""
    import lotus
    # Serialize the model proto to a .pb file in the temp directory.
    model_path = os.path.join(TMPPATH, name + ".pb")
    with open(model_path, "wb") as f:
        f.write(model_proto.SerializeToString())
    m = lotus.InferenceSession(model_path)
    results = m.run(self.output_names, inputs)
    if self.perf:
        # Optional performance measurement: time PERFITER repeated runs.
        start = time.time()
        for _ in range(PERFITER):
            _ = m.run(self.output_names, inputs)
        self.onnx_runtime = time.time() - start
    return results
Example #5
def run_onnxmsrtnext(self, model_path, inputs, output_names):
    """Run test against msrt-next backend."""
    import lotus
    # The model is already on disk; just create a session and run it.
    m = lotus.InferenceSession(model_path)
    results = m.run(output_names, inputs)
    return results