def optimize_graph_step(graph: Graph, config: Config) -> None:
    """Run the optimization-pass pipeline over a graph imported from a TensorFlow pb.

    The graph is mutated in place; nothing is returned.

    Args:
        graph (Graph): Graph that the optimization passes are applied to.
        config (Config): Collection of configurations controlling optional passes.

    Returns:
        None
    """
    # Structural clean-up before any quantization work.
    pass_remove_identities(graph)
    pass_transpose(graph)

    # Quantization-related passes run only when hard quantization is enabled.
    if config.activate_hard_quantization:
        pass_lookup(graph)
        pass_propagate_quantization_details_into_conv(graph)
        # Threshold skipping is a further opt-in refinement of hard quantization.
        if config.threshold_skipping:
            pass_compute_thresholds(graph)
        pass_pack_weights(graph)
        pass_quantize_convolutions(graph)

    # Propagate dtype/format metadata through the (possibly quantized) graph.
    pass_propagate_datatypes(graph)
    pass_propagate_format(graph)

    # Final simplifications.
    pass_constant_folding(graph)
    pass_simplify_batchnorm(graph)
    pass_insert_cast(graph)
def test_pass_quantize_convolutions(self) -> None:
    """Check that pass_quantize_convolutions sets the expected output dtypes.

    Builds a small sample graph from random data, runs the pass, and
    verifies the dtypes of the activation quantizer, kernel quantizer,
    and convolution outputs.
    """
    input_data = np.float32(np.random.rand(1, 2, 2, 3))
    kernel_data = np.float32(np.random.rand(1, 2, 2, 3))

    sample_graph = self.create_sample_graph(input_data, kernel_data)
    pass_quantize_convolutions(sample_graph)

    # (op name, expected dtype, failure message) triples checked in order.
    expectations = [
        ('aqtz1', QUANTIZED_PACKED(),
         '[Failed] Found output dtype of activation quantizer not proper'),
        ('kqtz1', PackedUint32(),
         '[Failed] Found output dtype of kernel quantizer not proper'),
        ('conv2', Float32(),
         '[Failed] Found output dtype of conv not proper'),
    ]
    for op_name, expected_dtype, message in expectations:
        self.assertEqual(sample_graph.get_op(op_name).dtype, expected_dtype, message)

    print("Test pass #5 quantize_convolutions passed!")