def test_non_user_provided_inputs_never_shape_tensors(self):
    """Inputs without user-supplied metadata are never treated as shape tensors."""
    # No user override is given, so "X" must be generated as an ordinary tensor.
    loader = DataLoader()
    loader.input_metadata = TensorMetadata().add("X", dtype=np.int32, shape=(3, ))
    batch = loader[0]
    # The array keeps the declared shape instead of being interpreted as shape values.
    assert batch["X"].shape == (3, )
def test_shape_tensor_detected(self):
    """A user override matching a shape-tensor layout is used as the shape values."""
    SHAPE_VALUES = (1, 2, 3)
    # The override's "shape" field actually carries the desired shape values.
    user_meta = TensorMetadata().add("X", dtype=np.int32, shape=SHAPE_VALUES)
    loader = DataLoader(input_metadata=user_meta)
    loader.input_metadata = TensorMetadata().add("X", dtype=np.int32, shape=(3, ))
    batch = loader[0]
    # The generated tensor's contents are exactly the user-provided values.
    assert np.all(batch["X"] == SHAPE_VALUES)
def test_can_override_shape(self):
    """User-provided static shapes take precedence over the model's own metadata."""
    model = ONNX_MODELS["dynamic_identity"]
    OVERRIDE_SHAPE = (1, 1, 4, 5)
    user_meta = TensorMetadata().add("X", dtype=None, shape=OVERRIDE_SHAPE)
    loader = DataLoader(input_metadata=user_meta)
    # Mimic the comparator, which assigns the model's metadata after construction.
    loader.input_metadata = model.input_metadata
    batch = loader[0]
    assert tuple(batch["X"].shape) == OVERRIDE_SHAPE
def test_no_shape_tensor_false_positive_float(self):
    """A float-typed input must never be detected as a shape tensor."""
    OVERRIDE_VALUES = (-100, -50, 0)
    # Shape tensors must be integer-typed; float32 disqualifies "X".
    user_meta = TensorMetadata().add("X", dtype=np.float32, shape=OVERRIDE_VALUES)
    loader = DataLoader(input_metadata=user_meta)
    loader.input_metadata = TensorMetadata().add("X", dtype=np.float32, shape=(3, ))
    batch = loader[0]
    # Treated as an ordinary tensor: shape comes from the real metadata...
    assert batch["X"].shape == (3, )
    # ...and the contents are generated data, not the override values.
    assert np.any(batch["X"] != OVERRIDE_VALUES)
def test_no_shape_tensor_false_positive_negative_dims(self):
    """Negative values in an override rule out shape-tensor interpretation."""
    OVERRIDE_VALUES = (-100, 2, 4)
    user_meta = TensorMetadata().add("X", dtype=np.int32, shape=OVERRIDE_VALUES)
    loader = DataLoader(input_metadata=user_meta)
    loader.input_metadata = TensorMetadata().add("X", dtype=np.int32, shape=(3, ))
    batch = loader[0]
    # "X" keeps the declared (3, ) shape since a negative dim is never a valid shape value.
    assert batch["X"].shape == (3, )
    # Contents are generated data, not the override values.
    assert np.any(batch["X"] != OVERRIDE_VALUES)
def test_val_range_dict_fallback(self, dtype):
    """Inputs absent from a val_range dict fall back to the default [0, 1] range."""
    loader = DataLoader(input_metadata=meta(dtype), val_range={"Y": (-3, 4)})
    batch = loader[0]
    # "X" has no entry in the dict, so the built-in default range applies.
    assert np.all((batch["X"] >= 0) & (batch["X"] <= 1))
    # "Y" uses its explicit range.
    assert np.all((batch["Y"] >= -3) & (batch["Y"] <= 4))
def test_calibrator_metadata_set(self, identity_builder_network):
    """Building an int8 config propagates input metadata to the calibrator's data loader."""
    builder, network = identity_builder_network
    calibrator = Calibrator(DataLoader())
    create_config = CreateConfig(int8=True, calibrator=calibrator)
    with create_config(builder, network) as config:
        assert config.int8_calibrator
        # After config creation, the network input should be visible to the data loader.
        assert "x" in calibrator.data_loader.input_metadata
def test_val_range_dict(self, dtype):
    """Per-input value ranges supplied via a dict are respected for every input."""
    ranges = {"X": (2, 5), "Y": (-1, 2)}
    loader = DataLoader(input_metadata=meta(dtype), val_range=ranges)
    batch = loader[0]
    for name, (lo, hi) in ranges.items():
        assert np.all((batch[name] >= lo) & (batch[name] <= hi))
def test_val_range_dict_default(self, dtype):
    """The empty-string key in val_range acts as a wildcard default range."""
    loader = DataLoader(input_metadata=meta(dtype),
                        val_range={"": (6, 8), "Y": (-3, 4)})
    batch = loader[0]
    # "X" has no explicit entry, so the "" default range applies to it.
    assert np.all((batch["X"] >= 6) & (batch["X"] <= 8))
    # "Y" uses its own entry in preference to the default.
    assert np.all((batch["Y"] >= -3) & (batch["Y"] <= 4))
def test_range_min_max_equal(self):
    """A degenerate int_range with min == max yields a constant-valued tensor."""
    CONSTANT = 1
    metadata = TensorMetadata().add("X", dtype=np.int32, shape=(1, 1))
    loader = DataLoader(input_metadata=metadata, int_range=(CONSTANT, CONSTANT))
    batch = loader[0]
    assert np.all(batch["X"] == CONSTANT)
def test_val_ranges(self, range):
    """Generated values honor the (min, max) bounds of val_range for each dtype."""
    # NOTE: the `range` parameter (a pytest parameter name) shadows the builtin;
    # it cannot be renamed without changing the test's external interface.
    lo, hi, dtype = range
    loader = DataLoader(input_metadata=meta(dtype), val_range=(lo, hi))
    batch = loader[0]
    assert np.all((batch["X"] >= lo) & (batch["X"] <= hi))
def get_data_loader(self):
    """Build and return the data loader described by this argument group.

    Returns:
        A default DataLoader when no custom arguments were provided, otherwise
        the object produced by executing the generated script.
    """
    script = Script()
    data_loader_name = self.add_to_script(script)
    if data_loader_name is None:  # All arguments are default
        from polygraphy.comparator import DataLoader
        return DataLoader()
    # Execute the generated script; it is expected to define the loader under
    # the name `data_loader_name`.
    # NOTE(review): this relies on CPython's behavior where exec() writes into
    # the dict returned by locals() and a later locals() call exposes those
    # entries — implementation-dependent and fragile; confirm before porting.
    exec(str(script), globals(), locals())
    return locals()[data_loader_name]
def test_can_build_with_calibrator(self, identity_builder_network):
    """An int8 engine build with a calibrator completes and frees its buffers."""
    builder, network = identity_builder_network
    calibrator = Calibrator(DataLoader())
    config_loader = CreateConfig(int8=True, calibrator=calibrator)
    engine_loader = EngineFromNetwork((builder, network), config_loader)
    with engine_loader():
        pass
    # Once the build finishes, every calibrator device buffer must be released.
    assert all(buf.allocated_nbytes == 0 for buf in calibrator.device_buffers.values())
def test_multirun_outputs_are_different(self):
    """Successive data-loader iterations must feed distinct inputs to the runner."""
    onnx_loader = ONNX_MODELS["identity"].loader
    runner = TrtRunner(EngineFromNetwork(NetworkFromOnnxBytes(onnx_loader)))
    results = Comparator.run([runner], data_loader=DataLoader(iterations=2))
    first = results[runner.name][0]
    second = results[runner.name][1]
    # With random inputs, every output should differ between the two iterations.
    for out_name in first.keys():
        assert np.any(first[out_name] != second[out_name])
def get_data_loader(self, user_input_metadata=None):
    """Build and return the data loader for these arguments.

    Args:
        user_input_metadata:
            Optional user-supplied input metadata forwarded to the script builder.

    Returns:
        The constructed data loader; invoked first when the script builder
        signals that invocation is required.
    """
    from polygraphy.comparator import DataLoader
    needs_invoke = False
    # run_script expects the callable to return just the variable name, but self.add_to_script
    # has 2 return values. We wrap it here to create a function with the right signature.
    def add_to_script_wrapper(script, *args, **kwargs):
        nonlocal needs_invoke
        name, needs_invoke = self._add_to_script(script, *args, **kwargs)
        return name
    # Fall back to a default DataLoader when the script produced nothing.
    data_loader = util.default(
        args_util.run_script(add_to_script_wrapper, user_input_metadata), DataLoader())
    if needs_invoke:
        # Presumably the script produced a loader factory rather than a loader,
        # so calling it yields the actual data loader — confirm against _add_to_script.
        data_loader = data_loader()
    return data_loader
#!/usr/bin/env python3 # Template auto-generated by polygraphy [v0.33.0] on 05/07/22 at 07:01:05 # Generation Command: /usr/local/bin/polygraphy run model.onnx --onnxrt --trt --workspace 1000000000 --save-engine=model-FP32.plan --atol 1e-3 --rtol 1e-3 --verbose --gen-script=./polygraphyRun.py --trt-min-shapes tensor-0:[1,1,28,28] --trt-opt-shapes tensor-0:[4,1,28,28] --trt-max-shapes tensor-0:[16,1,28,28] --input-shapes tensor-0:[4,1,28,28] # This script compares /work/gitlab/tensorrt-cookbook-in-chinese/08-Tool/Polygraphy/runExample/model.onnx between ONNX Runtime and TensorRT. from polygraphy.logger import G_LOGGER G_LOGGER.severity = G_LOGGER.VERBOSE from polygraphy.backend.onnxrt import OnnxrtRunner, SessionFromOnnx from polygraphy.backend.trt import CreateConfig as CreateTrtConfig, EngineFromNetwork, NetworkFromOnnxPath, Profile, SaveEngine, TrtRunner from polygraphy.common import TensorMetadata from polygraphy.comparator import Comparator, CompareFunc, DataLoader import sys # Data Loader data_loader = DataLoader( input_metadata=TensorMetadata().add('tensor-0', None, (4, 1, 28, 28))) # Loaders build_onnxrt_session = SessionFromOnnx( '/work/gitlab/tensorrt-cookbook-in-chinese/08-Tool/Polygraphy/runExample/model.onnx' ) parse_network_from_onnx = NetworkFromOnnxPath( '/work/gitlab/tensorrt-cookbook-in-chinese/08-Tool/Polygraphy/runExample/model.onnx' ) profiles = [ Profile().add('tensor-0', min=[1, 1, 28, 28], opt=[4, 1, 28, 28], max=[16, 1, 28, 28]) ] create_trt_config = CreateTrtConfig(max_workspace_size=1000000000,
def get_data_loader(self, user_input_metadata=None):
    """Return the data loader built from these arguments, or a default one.

    Args:
        user_input_metadata:
            Optional user-supplied input metadata forwarded to the script builder.
    """
    from polygraphy.comparator import DataLoader
    # run_script yields None when no custom loader was configured;
    # substitute a default DataLoader in that case.
    script_result = args_util.run_script(self.add_to_script, user_input_metadata)
    return util.default(script_result, DataLoader())
def test_default_ranges(self, dtype):
    """Without explicit ranges, generated values lie within the default [0, 1]."""
    x, y = DataLoader(input_metadata=meta(dtype))[0].values()
    for tensor in (x, y):
        assert np.all((tensor >= 0) & (tensor <= 1))
def test_range_min_max_equal(self, dtype, range_val):
    """Equal min/max bounds in val_range produce constant tensors for every input."""
    loader = DataLoader(input_metadata=meta(dtype),
                        val_range=(range_val, range_val))
    batch = loader[0]
    for name in ("X", "Y"):
        assert np.all(batch[name] == range_val)