def test_quantize_network(self, model_name, model_builder, input_size):
    # NOTE(review): this definition is shadowed by a later `test_quantize_network`
    # in this file that takes a `_quantize_config` fixture — under pytest only the
    # later definition is collected, so this legacy (pre-quantizer-config API)
    # version never runs. Looks dead; confirm and remove.
    net = model_builder()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    qnet = QuantizedNetwork(net, input_size)
    # Two forward passes: the second exercises the already-built quantized graph.
    _ = qnet(torch.zeros(*input_size))
    _ = qnet(torch.zeros(*input_size))
    check_graph(to_networkx(ctx), model_name, 'quantized')
def test_quantize_network(self, model_name, model_builder, input_size, _quantize_config):
    """Quantize a freshly built model and compare its traced graph to the reference."""
    model = model_builder()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    qnet = QuantizedNetwork(model, _quantize_config.quantizer, input_size,
                            dummy_forward_fn=create_dummy_forward_fn(input_size))
    dummy_input = torch.zeros(*input_size)
    # Run twice: the second pass goes through the already-built quantized graph.
    for _ in range(2):
        _ = qnet(dummy_input)
    check_graph(to_networkx(ctx), model_name, _quantize_config.graph_dir)
def test_build_graph(self, model_name, model_builder, input_size):
    """Trace a model twice and check the captured graph against the reference dot file."""
    model = model_builder()
    ctx = reset_context('test')
    with context('test') as c:
        _ = model(torch.zeros(input_size))
        # Reset per-scope call counters so the second trace matches a fresh pass.
        c.reset_scope_operator_call_counters()
        _ = model(torch.zeros(input_size))
    check_graph(to_networkx(ctx), model_name, 'original')
def test_resnet18__with_qinput():
    """ResNet18 with input quantization enabled should match its reference graph."""
    model = test_models.ResNet18()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    shape = (1, 3, 32, 32)
    qnet = QuantizedNetwork(model, shape, quantize_inputs=True)
    # Two passes: the second exercises the already-built quantized graph.
    for _ in range(2):
        _ = qnet(torch.zeros(*shape))
    check_graph(to_networkx(ctx), 'resnet18_qinput.dot', 'quantized')
def test_resnet18__with_ignore():
    # NOTE(review): this definition is shadowed by a later `test_resnet18__with_ignore`
    # in this file that takes a `_quantize_config` fixture — under pytest only the
    # later definition is collected, so this legacy (pre-quantizer-config API)
    # version never runs. Looks dead; confirm and remove.
    net = test_models.ResNet18()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    input_shape = (1, 3, 32, 32)
    # layer3 is excluded from quantization via ignored_scopes.
    qnet = QuantizedNetwork(net, input_shape, ignored_scopes=['ResNet/Sequential[layer3]'])
    _ = qnet(torch.zeros(*input_shape))
    _ = qnet(torch.zeros(*input_shape))
    check_graph(to_networkx(ctx), 'resnet18_ignore.dot', 'quantized')
def test_output_quantization(_quantize_config):
    """UNet with quantize_outputs=True should match the reference quantized graph."""
    model = test_models.UNet()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    shape = (1, 3, 360, 480)
    qnet = QuantizedNetwork(model, _quantize_config.quantizer, shape,
                            dummy_forward_fn=create_dummy_forward_fn(shape),
                            quantize_outputs=True)
    # Two passes: the second exercises the already-built quantized graph.
    for _ in range(2):
        _ = qnet(torch.zeros(*shape))
    check_graph(to_networkx(ctx), 'unet_qoutput.dot', _quantize_config.graph_dir)
def test_resnet18__with_ignore(_quantize_config):
    """ResNet18 with layer3 excluded via ignored_scopes should match its reference graph."""
    model = test_models.ResNet18()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    shape = (1, 3, 32, 32)
    qnet = QuantizedNetwork(model, _quantize_config.quantizer, shape,
                            dummy_forward_fn=create_dummy_forward_fn(shape),
                            ignored_scopes=['ResNet/Sequential[layer3]'])
    # Two passes: the second exercises the already-built quantized graph.
    for _ in range(2):
        _ = qnet(torch.zeros(*shape))
    check_graph(to_networkx(ctx), 'resnet18_ignore.dot', _quantize_config.graph_dir)
def test_resnet18__with_not_qinput(_quantize_config):
    """ResNet18 with input quantization disabled should match its reference graph."""
    model = test_models.ResNet18()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    shape = (1, 3, 32, 32)
    qnet = QuantizedNetwork(model, _quantize_config.quantizer, shape,
                            dummy_forward_fn=create_dummy_forward_fn(shape),
                            quantize_inputs=False)
    # Two passes: the second exercises the already-built quantized graph.
    for _ in range(2):
        _ = qnet(torch.zeros(*shape))
    check_graph(to_networkx(ctx), 'resnet18_no_qinput.dot', _quantize_config.graph_dir)
def test_custom_quantizable_subgraph_patterns(_quantize_config):
    """SENet18 with user-supplied fusable op patterns should match its reference graph."""
    model = test_models.SENet18()
    ctx = reset_context('orig')
    ctx = reset_context('quantized_graphs')
    shape = (1, 3, 32, 32)
    # Custom patterns: treat sigmoid*mul and iadd+batch_norm sequences as single
    # quantizable subgraphs.
    patterns = (("sigmoid", "__mul__"), ("__iadd__", "batch_norm"))
    qnet = QuantizedNetwork(model, _quantize_config.quantizer, shape,
                            dummy_forward_fn=create_dummy_forward_fn(shape),
                            quantize_outputs=False,
                            quantizable_subgraph_patterns=patterns)
    # Two passes: the second exercises the already-built quantized graph.
    for _ in range(2):
        _ = qnet(torch.zeros(*shape))
    check_graph(to_networkx(ctx), 'senet_custom_patterns.dot', _quantize_config.graph_dir)
def test_iterate_module_list():
    """Graph tracing should handle submodules reached by indexing an nn.ModuleList."""

    class Net(nn.Module):
        def __init__(self):
            super().__init__()
            self.ml = nn.ModuleList([nn.Conv2d(1, 1, 1), nn.Conv2d(1, 1, 1)])

        def forward(self, x):
            # Index into the ModuleList explicitly rather than iterating it.
            return [self.ml[0](x), self.ml[1](x)]

    model = Net()
    ctx = reset_context('test')
    with context('test'):
        _ = model(torch.zeros(1, 1, 1, 1))
    check_graph(to_networkx(ctx), 'case_iterate_module_list.dot', 'original')
def test_sparse_network(self, model_name, model_builder, input_size, algo, params):
    """Apply a sparsity algorithm and verify module coverage plus the traced graph."""
    model = model_builder()

    from nncf.layers import NNCF_MODULES_MAP
    expected_sparsified = len(
        get_all_modules_by_type(model, list(NNCF_MODULES_MAP.values())))

    ctx = reset_context('test')
    config = get_empty_config(input_sample_size=input_size)
    config["compression"] = {"algorithm": algo, "params": params}
    compression_algo = create_compression_algorithm(model, config)
    # Every sparsifiable module must have been wrapped by the algorithm.
    assert expected_sparsified == len(compression_algo.sparsified_module_info)

    model = compression_algo.model
    with context('test') as c:
        _ = model(torch.zeros(input_size))
        # Reset per-scope call counters so the second trace matches a fresh pass.
        c.reset_scope_operator_call_counters()
        _ = model(torch.zeros(input_size))
    check_graph(to_networkx(ctx), model_name, algo)