def test_const_floats(self, tf_dtype, np_dtype):
    shape = [1, 1, 200, 50]
    values = np.random.uniform(low=np.finfo(np.float32).min, high=np.finfo(np.float32).max,
                               size=shape).astype(np_dtype)
    tensor_proto = tf.make_tensor_proto(values=values, dtype=tf_dtype, shape=shape)
    pb = PB({
        "attr": PB({
            "value": PB({
                "tensor": PB({
                    "dtype": tensor_proto.dtype,
                    "tensor_shape": tensor_proto.tensor_shape,
                    "tensor_content": tensor_proto.tensor_content
                })
            })
        })
    })
    self.expected = {
        'data_type': np_dtype,
        'shape': np.asarray(shape, dtype=np.int),
        'value': values
    }
    self.res = tf_const_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
def test_const_bools(self, tf_dtype, np_dtype):
    shape = [1, 1, 50, 50]
    values = np.random.choice(a=[True, False], size=shape, p=[0.5, 0.5])
    tensor_proto = tf.make_tensor_proto(values=values, dtype=tf_dtype, shape=shape)
    pb = PB({
        "attr": PB({
            "value": PB({
                "tensor": PB({
                    "dtype": tensor_proto.dtype,
                    "tensor_shape": tensor_proto.tensor_shape,
                    "bool_val": values.tolist()
                })
            })
        })
    })
    self.expected = {
        'data_type': np_dtype,
        'shape': np.asarray(shape, dtype=np.int),
        'value': values
    }
    self.res = tf_const_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
def test_const_uints(self, tf_dtype, np_dtype):
    shape = [1, 1, 200, 50]
    values = np.random.randint(low=np.iinfo(np_dtype).min, high=np.iinfo(np_dtype).max,
                               size=shape, dtype=np_dtype)
    tensor_proto = tf.make_tensor_proto(values=values, dtype=tf_dtype, shape=shape)
    pb = PB({"attr": PB({
        "value": PB({
            "tensor": PB({
                "dtype": tensor_proto.dtype,
                "tensor_shape": tensor_proto.tensor_shape,
            })
        })
    })})
    if tf_dtype == tf.uint16:
        setattr(pb.attr.value.tensor, "int_val", values.tolist())
    else:
        setattr(pb.attr.value.tensor, "tensor_content", tensor_proto.tensor_content)
    self.expected = {
        'data_type': np_dtype,
        'shape': np.asarray(shape, dtype=np.int),
        'value': values
    }
    self.res = tf_const_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
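# The const tests above take (tf_dtype, np_dtype) pairs as arguments, so they are meant to be
# driven by a parametrizing decorator. A minimal sketch of how such a test could be wired up,
# assuming the `generator` package (`@generator` / `@generate`) for parametrized unittest
# classes; the class name and dtype list below are illustrative assumptions, not taken from
# the tests themselves.
import unittest

import numpy as np
import tensorflow as tf
from generator import generator, generate


@generator
class ConstDtypeParametrizationExample(unittest.TestCase):
    # Hypothetical dtype pairs; a real test class would define its own list and set up
    # infer_mock / compare in setUp.
    @generate(*[
        (tf.float32, np.float32),
        (tf.float64, np.float64),
    ])
    def test_const_floats(self, tf_dtype, np_dtype):
        pass  # body as in test_const_floats above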
def test_is_cyclic(self):
    pb = PB({})
    node = PB({'pb': pb})
    NextIterationExtractor.extract(node)
    self.expected = {
        'is_cyclic': True,
    }
    self.res = node
    self.compare()
def _create_image_scaler_node():
    pb = onnx.helper.make_node(
        'ImageScaler',
        inputs=['a'],
        outputs=['b'],
        scale=1.0,
        bias=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0],
    )
    node = PB({'pb': pb, 'graph': PB({'graph': {'layout': 'NCHW'}})})
    return node
def test_squeeze(self):
    pb = PB({'attr': {
        'squeeze_dims': PB({'list': PB({'i': [1, 2]})})
    }})
    self.expected = {
        'type': 'Reshape',
        'squeeze_dims': np.array([1, 2], dtype=np.int8),
    }
    self.res = tf_squeeze_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
def test_prod(self):
    pb = PB({'attr': {
        'keep_dims': PB({'b': True}),
    }})
    self.expected = {
        'keep_dims': True,
    }
    self.res = tf_reduce_prod_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = (None, np.multiply.reduce)
    self.compare()
def test_eltwise_dtypes_map(self, dtype, np_type):
    node_pb = PB({'pb': PB({'attr': PB({'T': PB({"type": dtype})})})})
    self.expected = {
        'can_be_bias': True,
        'data_type': np_type,
        'operation': 'sum',
        'type': 'Eltwise'
    }
    self.res = tf_op_extractors['Add'](node_pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
def test_eltwise_relu(self, lambda_args, expected_res, expected_type):
    node_pb = PB({'pb': PB({'attr': PB({'T': PB({"type": 1})})})})
    self.expected = {'data_type': np.float32, "type": "ReLU"}
    self.res = tf_op_extractors['Relu'](node_pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    eltwise_lambda = self.call_args[0][1]
    lambda_res = eltwise_lambda(lambda_args)
    self.check_lambda_res(actual=lambda_res, expected=expected_res, expected_type=expected_type)
    self.expected_call_args = None
    self.compare()
def test_concat(self):
    node = PB({'pb': PB({'attr': {'N': PB({'i': 4})}})})
    self.expected = {
        'N': 4,
        'simple_concat': True,
        'type': 'Concat',
        'op': 'Concat',
        'kind': 'op',
        'axis': 1
    }
    ConcatFrontExtractor.extract(node)
    self.res = node
    self.compare()
def test_mean(self):
    pb = PB({'attr': {
        'keep_dims': PB({'b': True}),
    }})
    self.expected = {
        'type': "Pooling",
        'keep_dims': True,
    }
    self.res = tf_mean_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = (None, np.add.reduce)
    self.compare()
def test_concat(self):
    pb = PB({'attr': {
        'N': PB({'i': 3}),
    }})
    self.expected = {
        'type': 'Concat',
        'N': 3,
    }
    self.res = tf_concat_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = (None)
    self.compare()
def test_conv2d_depthwise(self):
    node = PB({
        'pb': PB({
            'attr': {
                'data_format': PB({'s': b"NHWC"}),
                'strides': PB({
                    'list': PB({"i": self.strides}),
                }),
                'dilations': PB({
                    'list': PB({"i": self.dilations}),
                }),
                'padding': PB({'s': b'VALID'})
            }
        })
    })
    self.expected = {
        # spatial_dims = [1, 2] will be detected in infer function
        "channel_dims": [3],
        "batch_dims": [0],
        "input_feature_channel": 2,
        "output_feature_channel": 2,
        'dilation': np.array([1, 1, 1, 1], dtype=np.int8),
        'stride': np.array(self.strides, dtype=np.int8),
    }
    DepthwiseConv2dNativeFrontExtractor.extract(node)
    self.res = node
    self.expected_call_args = (None, True)
    self.compare()
def test_pool_defaults(self):
    pb = PB({'attr': {
        'data_format': PB({'s': b"NHWC"}),
        'strides': PB({
            'list': PB({"i": self.strides})
        }),
        'ksize': PB({
            'list': PB({"i": self.ksize})
        }),
        'padding': PB({'s': b'VALID'})
    }})
    self.expected = {
        'pad': None,  # will be inferred when input shape is known
        'pad_spatial_shape': None,
        'type': 'Pooling',
        'exclude_pad': 'true',
    }
    node = PB({'pb': pb})
    AvgPoolFrontExtractor.extract(node)
    self.res = node
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = (None, None)
    self.compare()
def test_avg_pool_nchw(self):
    pb = PB({'attr': {
        'data_format': PB({'s': b"NCHW"}),
        'strides': PB({
            'list': PB({"i": self.strides})
        }),
        'ksize': PB({
            'list': PB({"i": self.ksize})
        }),
        'padding': PB({'s': b'VALID'})
    }})
    self.expected = {
        'window': np.array(self.ksize, dtype=np.int8),
        'spatial_dims': [2, 3],
        'stride': np.array(self.strides, dtype=np.int8),
        'pool_method': "avg",
    }
    node = PB({'pb': pb})
    AvgPoolFrontExtractor.extract(node)
    self.res = node
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = (None, "avg")
    self.compare()
def test_conv2d_nchw(self):
    node = PB({
        'pb': PB({
            'attr': {
                'data_format': PB({'s': b"NCHW"}),
                'strides': PB({'list': PB({"i": self.strides})}),
                'padding': PB({'s': b'VALID'}),
                'dilations': PB({'list': PB({"i": [1, 1, 1, 1]})})
            }
        })
    })
    self.expected = {
        # spatial_dims = [2, 3] will be detected in infer function
        "channel_dims": [1],
        "batch_dims": [0],
        "input_feature_channel": 2,
        "output_feature_channel": 3,
        'dilation': np.array([1, 1, 1, 1], dtype=np.int8),
        'stride': np.array(self.strides, dtype=np.int8),
    }
    Conv2DFrontExtractor.extract(node)
    self.res = node
    self.expected_call_args = (None, False)
    self.compare()
def _create_priorbox_node(aspect_ratio=[], min_size=np.array([]), max_size=np.array([]),
                          flip=False, clip=False, variance=None, img_size=0, img_h=0,
                          img_w=0, step=0, step_h=0, step_w=0, offset=0):
    pb = onnx.helper.make_node(
        'PriorBox',
        inputs=['x'],
        outputs=['y'],
        aspect_ratio=aspect_ratio,
        min_size=min_size,
        max_size=max_size,
        flip=flip,
        clip=clip,
        variance=variance,
        img_size=img_size,
        img_h=img_h,
        img_w=img_w,
        step=step,
        step_h=step_h,
        step_w=step_w,
        offset=offset,
    )
    node = PB({'pb': pb})
    return node
def test_conv_ext_ideal_numbers(self):
    params = {
        'attrs': {
            "kernel": "(3, 4)",
            "stride": "(3, 2)",
            "pad": "(7, 8)",
            "pool_type": "max"
        }
    }
    node = PB({'symbol_dict': params})
    PoolingFrontExtractor.extract(node)
    exp_res = {
        'op': 'Pooling',
        'pad': np.array([[0, 0], [0, 0], [7, 7], [8, 8]]),
        'pad_spatial_shape': np.array([[7, 7], [8, 8]]),
        'stride': np.array([1, 1, 3, 2]),
        'window': np.array([1, 1, 3, 4]),
        'pool_method': 'max',
        'exclude_pad': 'false',
    }
    for key in exp_res.keys():
        if key in ('pad', 'stride', 'window', 'pad_spatial_shape'):
            np.testing.assert_equal(node[key], exp_res[key])
        else:
            self.assertEqual(node[key], exp_res[key])
def test_multi_box_detection_check_attrs_without_top_k(self):
    params = {
        'attrs': {
            "force_suppress": "True",
            "nms_threshold": "0.2",
            "threshold": "0.02",
            "variances": "(0.1, 0.1, 0.2, 0.2)"
        }
    }
    node = PB({'symbol_dict': params})
    MultiBoxDetectionOutputExtractor.extract(node)
    exp_attrs = {
        'type': 'DetectionOutput',
        'num_classes': 21,
        'keep_top_k': -1,
        'variance_encoded_in_target': 0,
        'code_type': "caffe.PriorBoxParameter.CENTER_SIZE",
        'share_location': 1,
        'confidence_threshold': 0.02,
        'background_label_id': 0,
        'nms_threshold': 0.2,
        'top_k': -1,
        'decrease_label_id': 1,
        'clip_before_nms': 1,
        'normalized': 1,
    }
    for key in exp_attrs.keys():
        self.assertEqual(node[key], exp_attrs[key])
def test_conv_ext_ideal_numbers(self, weights_biases_mock, layout_attrs_mock):
    weights_biases_mock.return_value = {}
    layout_attrs_mock.return_value = {}
    params = {
        'pad': 10,
        'kernel_size': 11,
        'stride': 12,
        'dilation': 13,
        'group': 14,
        'num_output': 15,
        'bias_term': True
    }
    node = PB({'pb': FakeConvProtoLayer(FakeMultiParam(params))})
    ConvFrontExtractor.extract(node)
    res = node
    exp_res = {
        'op': 'Conv2D',
        'pad': np.array([[0, 0], [0, 0], [10, 10], [10, 10]]),
        'pad_spatial_shape': np.array([[10, 10], [10, 10]]),
        'stride': np.array([1, 1, 12, 12]),
        'kernel_spatial': np.array([11, 11]),
        'dilation': np.array([1, 1, 13, 13]),
        'group': 14,
        'bias_addable': True,
        'bias_term': True,
    }
    self.assertTrue(weights_biases_mock.called)
    self.assertTrue(layout_attrs_mock.called)
    for key in exp_res.keys():
        if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'):
            np.testing.assert_equal(res[key], exp_res[key])
        else:
            self.assertEqual(res[key], exp_res[key])
def test_pooling_ext(self):
    params = {
        'kernel_size': 1,
        'stride': 2,
        'pad': 3,
        'pool': 1,
        'global_pooling': 0,
        'ceil_mode': 0
    }
    node = PB({'pb': FakeProtoLayer(FakeMultiParam(params))})
    PoolingFrontExtractor.extract(node)
    res = node
    exp_res = {
        'window': np.array([1, 1, 1, 1], dtype=np.int64),
        'stride': np.array([1, 1, 2, 2], dtype=np.int64),
        'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]], dtype=np.int64),
        'pad_spatial_shape': np.array([[3, 3], [3, 3]], dtype=np.int64),
        'pool_method': 'avg',
        'exclude_pad': 'false',
        'infer': Pooling.infer,
        'global_pool': 0,
        'output_spatial_shape': None,
        'pooling_convention': 'valid'
    }
    exp_res.update(layout_attrs())
    for i in exp_res.keys():
        if i in ('window', 'stride', 'pad', 'pad_spatial_shape',
                 'spatial_dims', 'batch_dims', 'channel_dims'):
            np.testing.assert_array_equal(res[i], exp_res[i])
        else:
            self.assertEqual(res[i], exp_res[i])
def test_conv_ext_with_bias(self):
    params = {
        'attrs': {
            "kernel": "(4, 4)",
            "no_bias": "False",
            "num_filter": "21",
            "num_group": "14",
            "pad": "(4, 4)",
            "stride": "(2, 2)",
            "dilate": "(3, 3)",
            "workspace": "1536"
        }
    }
    node = PB({'symbol_dict': params})
    DeconvFrontExtractor.extract(node)
    exp_res = {
        'op': 'Deconvolution',
        'pad': np.array([[0, 0], [0, 0], [4, 4], [4, 4]]),
        'pad_spatial_shape': np.array([[4, 4], [4, 4]]),
        'stride': np.array([1, 1, 2, 2]),
        'kernel_spatial': np.array([4, 4]),
        'dilation': np.array([1, 1, 3, 3]),
        'group': 14,
        'output': 21,
        'bias_addable': True,
        'bias_term': True,
    }
    for key in exp_res.keys():
        if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'):
            np.testing.assert_equal(node[key], exp_res[key])
        else:
            self.assertEqual(node[key], exp_res[key])
def _create_elu_node(alpha=1.0):
    pb = onnx.helper.make_node(
        'Elu',
        inputs=['x'],
        outputs=['y'],
        alpha=alpha
    )
    node = PB({'pb': pb})
    return node
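# A helper like _create_elu_node is typically consumed by a front-extractor test that runs the
# ONNX Elu extractor over the fake node and checks the extracted attributes. A hypothetical
# sketch; the extractor class name and the attribute it is expected to set are assumptions,
# not taken from this section.
import unittest


class EluExtractorUsageExample(unittest.TestCase):
    def test_elu_alpha_is_extracted(self):
        # Build a fake ONNX node with a non-default alpha.
        node = _create_elu_node(alpha=0.5)
        # Hypothetical extractor class; the real name may differ.
        EluFrontExtractor.extract(node)
        # The extractor is expected to copy the ONNX alpha attribute onto the node.
        self.assertAlmostEqual(node['alpha'], 0.5)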
def _create_priorbox_clustered_node(width=np.array([]), height=np.array([]), flip=False,
                                    clip=False, variance=None, img_size=0, img_h=0,
                                    img_w=0, step=0, step_h=0, step_w=0, offset=0):
    pb = onnx.helper.make_node(
        'PriorBoxClustered',
        inputs=['x'],
        outputs=['y'],
        width=width,
        height=height,
        flip=flip,
        clip=clip,
        variance=variance,
        img_size=img_size,
        img_h=img_h,
        img_w=img_w,
        step=step,
        step_h=step_h,
        step_w=step_w,
        offset=offset,
    )
    node = PB({'pb': pb})
    return node
def _create_do_node(num_classes=0, share_location=0, background_label_id=0, code_type="",
                    variance_encoded_in_target=0, keep_top_k=0, confidence_threshold=0,
                    nms_threshold=0, top_k=0, eta=0):
    pb = onnx.helper.make_node(
        'DetectionOutput',
        inputs=['x'],
        outputs=['y'],
        num_classes=num_classes,
        share_location=share_location,
        background_label_id=background_label_id,
        code_type=code_type,
        variance_encoded_in_target=variance_encoded_in_target,
        keep_top_k=keep_top_k,
        confidence_threshold=confidence_threshold,
        # nms_param
        nms_threshold=nms_threshold,
        top_k=top_k,
        eta=eta,
    )
    node = PB({'pb': pb})
    return node
def test_stop_gradient(self):
    node = PB({
        'pb': PB({
            'attr': PB({
                'T': PB({"type": 1})
            })
        })
    })
    self.expected = {
        'op': 'StopGradient'
    }
    StopGradientExtractor().extract(node)
    self.res = node
    self.compare()
def test_matmul(self):
    pb = PB({
        'attr': {
            'transpose_a': PB({'b': True}),
            'transpose_b': PB({'b': False}),
        }
    })
    self.expected = {
        'transpose_a': True,
        'transpose_b': False,
    }
    self.res = tf_matmul_ext(pb=pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    self.expected_call_args = None
    self.compare()
def test_eltwise_mul(self, lambda_args, expected_res, expected_type):
    node_pb = PB({'pb': PB({'attr': PB({'T': PB({"type": 1})})})})
    self.expected = {
        'data_type': np.float32,
        'operation': 'mul',
        'type': 'Eltwise'
    }
    self.res = tf_op_extractors['Mul'](node_pb)
    self.res["infer"](None)
    self.call_args = self.infer_mock.call_args
    eltwise_lambda = self.call_args[0][1]
    lambda_res = eltwise_lambda(*lambda_args)
    self.check_lambda_res(actual=lambda_res, expected=expected_res, expected_type=expected_type)
    self.expected_call_args = None
    self.compare()
def test_simple_check(self):
    pb = PB({
        'attr': {
            'str': PB({'s': "aaaa"}),
            'int': PB({'i': 7}),
            'float': PB({'f': 2.0}),
            'bool': PB({'b': True}),
            'lisint': PB({'list': PB({
                'i': 5,
                'i': 6
            })})
        }
    })
    res = collect_tf_attrs(pb.attr)

    # Reference results for given parameters
    ref = {
        'str': pb.attr['str'].s,
        'int': pb.attr['int'].i,
        'float': pb.attr['float'].f,
        'bool': pb.attr['bool'].b,
        'lisint': pb.attr['lisint'].list.i
    }
    for attr in ref:
        self.assertEqual(res[attr], ref[attr])
def _create_flatten_node(axis):
    pb = onnx.helper.make_node(
        'Flatten',
        inputs=['a'],
        outputs=['b'],
        axis=axis,
    )
    node = PB({'pb': pb})
    return node
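# All of the fixtures above wrap hand-built dictionaries in a PB helper so that protobuf-style
# attribute access (node.pb, pb.attr.value.tensor, ...) works on plain test data. A minimal
# sketch of such a helper, assuming it is simply a dict that exposes its keys as attributes;
# the real implementation in the shared test utilities may differ.
class PB(dict):
    """Dict that also exposes its keys as attributes, mimicking protobuf messages in tests."""
    # Attribute reads fall back to dict.get, so missing keys return None instead of raising.
    __getattr__ = dict.get
    # Attribute writes become item writes, so setattr(pb.attr.value.tensor, 'int_val', ...)
    # in test_const_uints above simply adds a new key to the nested dict.
    __setattr__ = dict.__setitem__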