def _execute_pb_data(
    cls,
    model_dir: Path,
    prepared_model: BackendRep,
    result_rtol: float,
    result_atol: float,
) -> int:
    """Execute *prepared_model* against every ``test_data_set*`` directory.

    Each data set ships numbered ``input_*.pb`` / ``output_*.pb`` protobuf
    tensors.  Inputs are deserialized and fed to the model; the results are
    compared against the reference outputs via ``cls.assert_similar_outputs``.

    Args:
        model_dir: Directory containing the data sets.  NOTE(review): the
            original annotation said ``str``, but ``.glob`` is called on it,
            so callers must pass a ``pathlib.Path`` — annotation corrected.
        prepared_model: Backend representation; ``run`` takes the list of
            input arrays and returns the model outputs.
        result_rtol: Relative tolerance for the output comparison.
        result_atol: Absolute tolerance for the output comparison.

    Returns:
        The number of test data sets actually executed (sets without
        inputs are skipped and not counted).
    """

    def _load_arrays(directory: Path, stem: str) -> list:
        # Deserialize {stem}_0.pb .. {stem}_{n-1}.pb into numpy arrays.
        # Files are numbered consecutively, so counting them gives the range.
        arrays = []
        for i in range(len(list(directory.glob(stem + "_*.pb")))):
            tensor = onnx.TensorProto()
            with open(directory / "{}_{}.pb".format(stem, i), "rb") as f:
                tensor.ParseFromString(f.read())
            arrays.append(numpy_helper.to_array(tensor))
        return arrays

    executed_tests = 0
    for test_data_dir in model_dir.glob("test_data_set*"):
        data_dir = Path(test_data_dir)
        inputs = _load_arrays(data_dir, "input")
        ref_outputs = _load_arrays(data_dir, "output")
        # A data set without inputs gives the model nothing to run on.
        if not inputs:
            continue
        outputs = list(prepared_model.run(inputs))
        cls.assert_similar_outputs(ref_outputs, outputs, result_rtol, result_atol)
        executed_tests += 1
    return executed_tests
def _execute_npz_data( cls, model_dir: str, prepared_model: BackendRep, result_rtol: float, result_atol: float, ) -> None: for test_data_npz in glob.glob(os.path.join(model_dir, "test_data_*.npz")): test_data = np.load(test_data_npz, encoding="bytes") inputs = list(test_data["inputs"]) outputs = list(prepared_model.run(inputs)) ref_outputs = test_data["outputs"] cls.assert_similar_outputs(ref_outputs, outputs, result_rtol, result_atol)
def _execute_npz_data( cls, model_dir: str, prepared_model: BackendRep, result_rtol: float, result_atol: float, ) -> int: executed_tests = 0 for test_data_npz in model_dir.glob("test_data_*.npz"): test_data = np.load(test_data_npz, encoding="bytes") inputs = list(test_data["inputs"]) outputs = list(prepared_model.run(inputs)) ref_outputs = test_data["outputs"] cls.assert_similar_outputs(ref_outputs, outputs, result_rtol, result_atol) executed_tests = executed_tests + 1 return executed_tests
def _execute_npz_data( cls, model_dir: str, prepared_model: BackendRep, result_rtol: float, result_atol: float, post_processing: Callable[[Sequence[Any]], Sequence[Any]] = None) -> int: executed_tests = 0 for test_data_npz in model_dir.glob("test_data_*.npz"): test_data = np.load(test_data_npz, encoding="bytes") inputs = list(test_data["inputs"]) outputs = list(prepared_model.run(inputs)) ref_outputs = test_data["outputs"] if post_processing is not None: outputs = post_processing(outputs) cls.assert_similar_outputs(ref_outputs, outputs, result_rtol, result_atol) executed_tests = executed_tests + 1 return executed_tests
def _execute_pb_data(
    cls,
    model_dir: str,
    prepared_model: BackendRep,
    result_rtol: float,
    result_atol: float,
) -> None:
    """Execute *prepared_model* against every ``test_data_set*`` directory.

    Each data set ships numbered ``input_*.pb`` / ``output_*.pb`` protobuf
    tensors.  Inputs are deserialized and fed to the model; the results are
    compared against the reference outputs via ``cls.assert_similar_outputs``.

    Args:
        model_dir: Directory (path string) containing the data sets.
        prepared_model: Backend representation; ``run`` takes the list of
            input arrays and returns the model outputs.
        result_rtol: Relative tolerance for the output comparison.
        result_atol: Absolute tolerance for the output comparison.
    """

    def _load_arrays(data_dir: str, stem: str) -> list:
        # Deserialize {stem}_0.pb .. {stem}_{n-1}.pb into numpy arrays.
        # Files are numbered consecutively, so counting them gives the range.
        arrays = []
        for i in range(len(glob.glob(os.path.join(data_dir, stem + "_*.pb")))):
            tensor = onnx.TensorProto()
            with open(os.path.join(data_dir, "{}_{}.pb".format(stem, i)), "rb") as f:
                tensor.ParseFromString(f.read())
            arrays.append(numpy_helper.to_array(tensor))
        return arrays

    for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):
        inputs = _load_arrays(test_data_dir, "input")
        ref_outputs = _load_arrays(test_data_dir, "output")
        # Guard added for parity with the Path-based variant of this helper:
        # a data set without inputs gives the model nothing to run on, so
        # skip it instead of invoking run([]).
        if not inputs:
            continue
        outputs = list(prepared_model.run(inputs))
        cls.assert_similar_outputs(ref_outputs, outputs, result_rtol, result_atol)