def create_sample_graph(data1: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # Conv1
    w1 = Constant('weight1', Float32(), data1)
    conv1 = Conv('conv1', [1, 4, 4, 3], QUANTIZED_PACKED(), {'X': x, 'W': w1}, kernel_shape=[2, 2])
    conv1.is_quantized = True

    pool1 = SpaceToDepth('s2d', [1, 2, 2, 12], Float32(), {'input': conv1})

    # One output
    y = Output('output', [1, 2, 2, 12], Float32(), {'input': pool1})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
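# A minimal usage sketch (hypothetical, not part of the original tests). The
# weight shape [3, 2, 2, 3] is an assumption based on the 2x2 kernel and the
# 3-channel input/output used by the helpers in this file; the resulting graph
# would then be handed to the pass under test.
data1 = np.float32(np.random.rand(3, 2, 2, 3))
graph = create_sample_graph(data1)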
def create_expected_graph(data: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # constant and internal nodes
    w = Constant('weight', Float32(), data)
    q = QTZ_binary_mean_scaling('qtz1', [1, 2, 2, 3], Float32(), {'input': w})

    # Conv
    conv = Conv('conv', [1, 4, 4, 3], Float32(), {'X': x, 'W': q}, kernel_shape=[2, 2])

    # One output
    rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})
    y = Output('output', [1, 48], Float32(), {'input': rs})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def make_simple_model(self) -> Model:
    graph = Graph()

    # two inputs
    x = Input('input', [1, 5, 5, 3], Float32())
    w = Constant('weight', Float32(), np.zeros([1, 2, 2, 3]), dimension_format='NHWC')

    # Conv
    conv = Conv('conv', [1, 4, 4, 1], Float32(), {'X': x, 'W': w}, kernel_shape=[2, 2])

    # One output
    y = Output('output', [1, 4, 4, 1], Float32(), {'input': conv})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    model = Model()
    model.graph = graph
    return model
def test_conv(self) -> None:
    """Test code for Conv."""
    # get Conv's input names
    i_names = Conv.input_names
    self.assertTrue({'X', 'W'}.issubset(set(i_names)))

    # prepare Conv's inputs
    x = Input('input', [1, 3, 3, 3], Float32())
    w = Constant('weight', Float32(), np.zeros([1, 2, 2, 5]))
    inputs: Dict[str, Operator] = {i_names[0]: x, i_names[1]: w}
    c = Conv('conv1', [1, 2, 2, 3], Float32(), inputs, kernel_shape=[2, 2])

    self.assertEqual(c.batchsize, 1)
    self.assertEqual(c.height, 2)
    self.assertEqual(c.width, 2)
    self.assertEqual(c.channel, 3)
    self.assertEqual(c.kernel_height, 2)
    self.assertEqual(c.kernel_width, 2)

    print("Conv test passed!")
def create_sample_graph(data1: np.ndarray, data2: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # Conv1
    w1 = Constant('weight1', Float32(), data1)
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

    # activation quantizer
    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': conv1, 'Y': s1, 'Z': s2})

    # Conv2
    w2 = Constant('weight2', Float32(), data2)
    kq = QTZ_binary_mean_scaling('kqtz1', [1, 2, 2, 3], Float32(), {'input': w2})
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq, 'W': kq}, kernel_shape=[2, 2])
    conv2.a_quantizer = [aq]
    conv2.quantizer = kq

    # One output
    y = Output('output', [1, 3, 3, 3], Float32(), {'input': conv2})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_sample_graph() -> Graph:
    graph = Graph()

    x = Input('placeholder', [2], Float32())

    s1 = Constant('potato_1', Float32(), np.array([1, 2]))
    s2 = Constant('potato_2', Float32(), np.array([1, 3]))
    add1 = Add('potatoes', [2], Float32(), {'A': s1, 'B': s2})
    add2 = Add('more_potatoes', [2], Float32(), {'A': x, 'B': add1})

    # One output
    y = Output('output', [2], Float32(), {'input': add2})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
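# A minimal usage sketch (hypothetical, not part of the original tests): build
# the add-chain graph and verify its wiring with check_nodes(), as
# test_graph_conv does further below. That check_nodes() accepts this exact
# graph is an assumption.
graph = create_sample_graph()
assert graph.check_nodes()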
def test_conv_consistency(self) -> None:
    """Test code for Conv."""
    x = Input('const1', [1, 3, 3, 3], Float32())
    w = Constant('weight', Float32(), np.zeros([1, 2, 2, 3]))
    input_ops = {'X': cast(Operator, x), 'W': cast(Operator, w)}

    conv = Conv('conv_under_test', [1, 3, 3, 3], Float32(), input_ops,
                pads=[1, 1, 2, 2], strides=[2, 2])

    print("Consistency test for conv operator passed!")
def create_sample_graph(data1: np.ndarray, data2: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # Conv1
    w1 = Constant('weight1', Float32(), data1)
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

    # activation quantizer
    s1 = Constant('aq_const1', Int32(), np.array([2], dtype=np.int32))
    s2 = Constant('aq_const2', Float32(), np.array([2.0], dtype=np.float32))
    aq1 = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': conv1, 'Y': s1, 'Z': s2})

    # Conv2
    w2 = Constant('weight2', Float32(), data2)
    kq = QTZ_binary_mean_scaling('kqtz1', [1, 2, 2, 3], Float32(), {'input': w2})
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq1, 'W': kq}, kernel_shape=[2, 2])
    conv2.a_quantizer = [aq1]
    conv2.quantizer = kq
    conv2.is_quantized = True

    # batch normalization
    sc = Constant('bn_scale', Float32(), np.random.rand(3))
    be = Constant('bn_b', Float32(), np.random.rand(3))
    mu = Constant('bn_mu', Float32(), np.random.rand(3))
    va = Constant('bn_var', Float32(), np.random.rand(3))
    bn = BatchNormalization('bn', [1, 3, 3, 3], Float32(),
                            {'X': conv2, 'scale': sc, 'B': be, 'mean': mu, 'var': va})

    # activation quantizer
    s3 = Constant('aq_const3', Int32(), np.array([2], dtype=np.int32))
    s4 = Constant('aq_const4', Float32(), np.array([2.0], dtype=np.float32))
    aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {'X': bn, 'Y': s3, 'Z': s4})

    # One output
    y = Output('output', [1, 3, 3, 3], Float32(), {'input': aq2})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_transposed_graph(self, data: np.ndarray) -> Graph:
    graph = Graph()
    data = data.transpose([3, 2, 1, 0])

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32(), dimension_format='NHWC')

    # constant and internal nodes
    w = Constant('weight', Float32(), data, dimension_format='NHWC')
    i = Identity('identity1', [1, 2, 2, 3], Float32(), {'input': w}, dimension_format='NHWC')
    q = QTZ_binary_mean_scaling('qtz1', [1, 2, 2, 3], Float32(), {'input': i}, dimension_format='NHWC')

    # Conv
    conv = Conv('conv', [1, 4, 4, 3], Float32(), {'X': x, 'W': q},
                kernel_shape=[2, 2], dimension_format='NHWC')

    rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})

    # One output
    y = Output('output', [1, 48], Float32(), {'input': rs})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_sample_graph_2(data1: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # Conv1
    w1 = Constant('weight1', Float32(), data1)
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w1}, kernel_shape=[2, 2])

    s1 = Constant('const1', Float32(), np.zeros([1, 4, 4, 3]))
    add1 = Add('add', [1, 4, 4, 3], Float32(), {'A': conv1, 'B': s1})

    y = Output('output', [1, 4, 4, 3], Float32(), {'input': add1})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def test_graph_conv(self) -> None:
    """Test code for making a simple graph with Conv."""
    graph = Graph()

    # two inputs
    x = Input('input', [1, 5, 5, 3], Float32())
    w = Constant('weight', Float32(), np.zeros([1, 2, 2, 3]))

    # Conv
    conv = Conv('conv', [1, 4, 4, 3], Float32(),
                {'X': x, 'W': w},  # you can get these keys by 'Conv.input_names'
                kernel_shape=[2, 2])

    # One output
    y = Output('output', [1, 4, 4, 3], Float32(),
               {'input': conv})  # you can get this key by 'Output.input_names'

    # add ops to the graph
    graph.add_op(x)
    graph.add_op(w)
    graph.add_op(conv)
    graph.add_op(y)

    self.assertTrue(graph.check_nodes(), "All inputs of operators must match their outputs.")
    print("Graph test passed!")
def create_sample_graph(data: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [3, 5, 5, 1], Float32(), dimension_format='CWHN')

    # constant and internal nodes
    w = Constant('weight', Float32(), data, dimension_format='CWHN')
    i1 = Identity('identity1', [3, 2, 2, 1], Float32(), {'input': w}, dimension_format='CWHN')
    q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 1], Float32(), {'input': i1}, dimension_format='CWHN')

    # Conv
    conv = Conv('conv', [3, 4, 4, 1], Float32(), {'X': x, 'W': q},
                kernel_shape=[2, 2], dimension_format='CWHN')

    # One output
    rs = Reshape('reshape', [1, 48], Float32(), {'data': conv})
    y = Output('output', [1, 48], Float32(), {'input': rs})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_graph(self, graph):
    x1 = Input('input1', [1, 4, 4, 3], Float32())
    w1 = Constant('weight1', Float32(), np.zeros([1, 2, 2, 3]))
    conv1 = Conv('conv1', [1, 3, 3, 3], Float32(), {'X': x1, 'W': w1}, kernel_shape=[2, 2])

    w2 = Constant('weight2', Float32(), np.zeros([3, 2, 2, 3]))
    conv2 = Conv('conv2', [1, 2, 2, 3], Float32(), {'X': conv1, 'W': w2}, kernel_shape=[2, 2])

    x2 = Input('input2', [3, 3, 3, 3], Float32())
    x3 = Input('input3', [3, 3, 3, 3], Float32())
    conv3 = Conv('conv3', [3, 2, 2, 3], Float32(), {'X': x2, 'W': conv2}, kernel_shape=[2, 2])
    conv4 = Conv('conv4', [1, 2, 2, 3], Float32(), {'X': x3, 'W': conv3}, kernel_shape=[2, 2])

    y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv4})

    # add ops to the graph
    graph.add_op_and_inputs(y)
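# A minimal driver sketch (hypothetical, not part of the original tests) for the
# in-place builder above; the check_nodes() call mirrors test_graph_conv and
# assumes it accepts Conv outputs wired in as the 'W' input of downstream convs.
def test_create_graph(self) -> None:
    graph = Graph()
    self.create_graph(graph)
    self.assertTrue(graph.check_nodes(), "All inputs of operators must match their outputs.")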
def create_sample_graph(self, data1: np.ndarray, data2: np.ndarray, data3: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # constant and internal nodes
    w = Constant('weight', Float32(), data1)
    i = Identity('identity1', [3, 2, 2, 3], Float32(), {'input': w})
    t = Transpose('transpose1', [3, 2, 2, 3], Float32(), {'data': i}, perm=[3, 2, 1, 0])
    q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 3], Float32(), {'input': t})

    # Conv
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': q}, kernel_shape=[2, 2])

    i2 = Identity('identity2', [1, 4, 4, 3], Float32(), {'input': conv1})

    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': i2, 'Y': s1, 'Z': s2})

    dummy = Transpose('dummy', [1, 4, 4, 3], Float32(), {'data': aq}, perm=[0, 1, 2, 3])

    w2 = Constant('weight2', Float32(), data2)
    q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Float32(), {'input': w2})
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': dummy, 'W': q2}, kernel_shape=[2, 2])

    s3 = Constant('aq_const1', Float32(), np.array(1))
    s4 = Constant('aq_const2', Float32(), np.array(2))
    aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {'X': conv2, 'Y': s3, 'Z': s4})

    w3 = Constant('weight3', Float32(), data3)
    i3 = Identity('identity3', [1, 3, 3, 3], Float32(), {'input': aq2})
    conv3 = Conv('conv3', [1, 2, 2, 3], Float32(), {'X': i3, 'W': w3}, kernel_shape=[2, 2])

    # One output
    y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv3})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_precompute_graph(self, data1: np.ndarray, data2: np.ndarray, data3: np.ndarray) -> Graph:
    graph = Graph()

    # two inputs
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    scaling1, qdata = self.binary_mean_scaling(data1.transpose([3, 2, 1, 0]))
    w = Constant('weight', Float32(), qdata * scaling1)

    # Conv
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w}, kernel_shape=[2, 2])

    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': conv1, 'Y': s1, 'Z': s2})

    dummy = Transpose('dummy', [1, 4, 4, 3], Float32(), {'data': aq}, perm=[0, 1, 2, 3])

    scaling2, qdata2 = self.binary_mean_scaling(data2)
    w2 = Constant('weight2', Float32(), qdata2 * scaling2)
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': dummy, 'W': w2}, kernel_shape=[2, 2])

    s3 = Constant('aq_const1', Float32(), np.array(1))
    s4 = Constant('aq_const2', Float32(), np.array(2))
    aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {'X': conv2, 'Y': s3, 'Z': s4})

    w3 = Constant('weight3', Float32(), data3)
    conv3 = Conv('conv3', [1, 2, 2, 3], Float32(), {'X': aq2, 'W': w3}, kernel_shape=[2, 2])

    # One output
    y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv3})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph
def create_quantized_graph(self, data: np.ndarray, data2: np.ndarray, data3: np.ndarray) \
        -> Tuple[Graph, np.float32, np.float32]:
    graph = Graph()

    # two inputs
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    from modules.packer import Packer
    packer = Packer(1, 32)
    data = data.transpose([3, 2, 1, 0])
    scaling, qdata = self.binary_mean_scaling(data)
    shape = list(data.shape)

    w = Constant('weight', Float32(), qdata * scaling)
    q = QTZ_binary_mean_scaling('qtz1', shape, Float32(), {'input': w})
    q.scaling_factor = scaling

    # Conv
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w}, kernel_shape=[2, 2])

    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], QUANTIZED_NOT_PACKED(),
                                   {'X': conv1, 'Y': s1, 'Z': s2})

    dummy = Transpose('dummy', [1, 4, 4, 3], QUANTIZED_NOT_PACKED(), {'data': aq}, perm=[0, 1, 2, 3])

    scaling2, qdata2 = self.binary_mean_scaling(data2)
    w2 = Constant('weight2', Uint32(), packer.run(qdata2), packed=True, actual_shape=[3, 2, 2, 3])

    # quantizer connected to conv2 as 'conv2.quantizer'
    q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Uint32(), {'input': w2})
    q2.scaling_factor = scaling2

    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': dummy, 'W': w2},
                 kernel_shape=[2, 2], quantized=True)
    conv2.quantizer = q2

    s3 = Constant('aq_const1', Float32(), np.array(1))
    s4 = Constant('aq_const2', Float32(), np.array(2))
    aq2 = QTZ_linear_mid_tread_half('aqtz2', [1, 3, 3, 3], Float32(), {'X': conv2, 'Y': s3, 'Z': s4})

    w3 = Constant('weight3', Float32(), data3)
    conv3 = Conv('conv3', [1, 2, 2, 3], Float32(), {'X': aq2, 'W': w3}, kernel_shape=[2, 2])

    # One output
    y = Output('output', [1, 2, 2, 3], Float32(), {'input': conv3})

    # add ops to the graph
    graph.add_op_and_inputs(y)

    return graph, scaling, scaling2
def create_quantized_graph2(self, data1: np.ndarray, data2: np.ndarray, data3: np.ndarray) \
        -> Tuple[Graph, np.float32, np.float32]:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # constant and internal nodes
    scaling1, qdata1 = self.binary_mean_scaling(data1)
    w = Constant('weight', Float32(), qdata1 * scaling1)
    q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 3], Float32(), {'input': w})

    # Conv
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': w}, kernel_shape=[2, 2])

    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], QUANTIZED_NOT_PACKED(),
                                   {'X': conv1, 'Y': s1, 'Z': s2})

    from modules.packer import Packer
    packer = Packer(1, 32)

    scaling2, qdata2 = self.binary_mean_scaling(data2)
    w2 = Constant('weight2', Uint32(), packer.run(qdata2), packed=True, actual_shape=[3, 2, 2, 3])
    q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Float32(), {'input': w2})
    q2.scaling_factor = scaling2
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq, 'W': w2},
                 kernel_shape=[2, 2], quantized=True)
    conv2.quantizer = q2

    scaling3, qdata3 = self.binary_mean_scaling(data3)
    w3 = Constant('weight3', Uint32(), packer.run(qdata3), packed=True, actual_shape=[3, 2, 2, 3])
    q3 = QTZ_binary_mean_scaling('qtz3', [3, 2, 2, 3], Float32(), {'input': w3})
    q3.scaling_factor = scaling3
    conv3 = Conv('conv3', [1, 3, 3, 3], Float32(), {'X': aq, 'W': w3},
                 kernel_shape=[2, 2], quantized=True)
    conv3.quantizer = q3

    y1 = Output('output1', [1, 3, 3, 3], Float32(), {'input': conv2})
    y2 = Output('output2', [1, 3, 3, 3], Float32(), {'input': conv3})

    # add ops to the graph
    graph.add_op_and_inputs(y1)
    graph.add_op_and_inputs(y2)

    return graph, scaling2, scaling3
def create_sample_graph3(self, data1: np.ndarray, data2: np.ndarray, data3: np.ndarray) -> Graph:
    graph = Graph()

    # input
    x = Input('placeholder', [1, 5, 5, 3], Float32())

    # constant and internal nodes
    w = Constant('weight', Float32(), data1)
    q = QTZ_binary_mean_scaling('qtz1', [3, 2, 2, 3], Float32(), {'input': w})

    # Conv
    conv1 = Conv('conv1', [1, 4, 4, 3], Float32(), {'X': x, 'W': q}, kernel_shape=[2, 2])

    i2 = Identity('identity2', [1, 4, 4, 3], Float32(), {'input': conv1})

    s1 = Constant('aq_const1', Float32(), np.array(1))
    s2 = Constant('aq_const2', Float32(), np.array(2))
    aq = QTZ_linear_mid_tread_half('aqtz1', [1, 4, 4, 3], Float32(), {'X': i2, 'Y': s1, 'Z': s2})

    w2 = Constant('weight2', Float32(), data2)
    q2 = QTZ_binary_mean_scaling('qtz2', [3, 2, 2, 3], Float32(), {'input': w2})
    conv2 = Conv('conv2', [1, 3, 3, 3], Float32(), {'X': aq, 'W': q2}, kernel_shape=[2, 2])

    w3 = Constant('weight3', Float32(), data3)
    q3 = QTZ_binary_mean_scaling('qtz3', [3, 2, 2, 3], Float32(), {'input': w3})
    conv3 = Conv('conv3', [1, 3, 3, 3], Float32(), {'X': aq, 'W': q3}, kernel_shape=[2, 2])

    y1 = Output('output1', [1, 3, 3, 3], Float32(), {'input': conv2})
    y2 = Output('output2', [1, 3, 3, 3], Float32(), {'input': conv3})

    # add ops to the graph
    graph.add_op_and_inputs(y1)
    graph.add_op_and_inputs(y2)

    return graph