def test_reogyolo_ext_ideal_numbers(self, merge_attrs_mock):
    """RegionYoloFrontExtractor must copy every region and flatten
    attribute onto the node and attach regionyolo_infer.

    NOTE(review): despite the 'reogyolo' name this exercises the
    RegionYolo extractor, not ReorgYolo.
    """
    region_params = {
        'coords': 4,
        'classes': 20,
        'num': 5,
        'do_softmax': 1,
        'anchors': 5,
        'mask': 5,
    }
    flatten_params = {'axis': 1, 'end_axis': -1}
    merge_attrs_mock.return_value = dict(region_params, **flatten_params)

    proto_layer = FakeRegionYoloProtoLayer(FakeMultiParam(region_params),
                                           FakeMultiParam(flatten_params))
    node = FakeNode(proto_layer, None)
    RegionYoloFrontExtractor.extract(node)

    expected = dict(region_params, **flatten_params)
    expected.update({'type': "RegionYolo",
                     'infer': RegionYoloOp.regionyolo_infer})
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_check_phase_no_phase(self):
    """check_phase must return an empty dict when the layer's include
    section carries no phase field."""
    include_param = {'include': [FakeMultiParam({})]}
    graph = build_graph(nodes_attributes,
                        [('node_1', 'node_2')],
                        {'node_1': {'pb': FakeMultiParam(include_param)}})
    self.assertEqual(check_phase(Node(graph, 'node_1')), {})
def test_params_creation(self):
    """conv_set_params must fall back to the per-dimension fields
    (pad_w/pad_h, kernel_w/kernel_h, stride_w/stride_h) when the
    aggregate fields (pad, kernel_size, stride, dilation) are unset,
    and default dilation to [1, 1].

    Fix: the membership tuple guarding the numpy comparison listed
    'stride' twice; the duplicate is removed (no behavioral change).
    """
    params = {
        'pad': None,
        'kernel_size': None,
        'stride': None,
        'dilation': None,
        'group': 14,
        'num_output': 15,
        'bias_term': True,
        'pad_w': 3,
        'pad_h': 4,
        'kernel_w': 5,
        'kernel_h': 6,
        'stride_h': 3,
        'stride_w': 2,
    }
    exp_res = {
        'padding': [3, 4],
        'stride': [2, 3],
        'kernel': [5, 6],
        'dilate': [1, 1],
        'group': 14,
        'output': 15
    }
    res = conv_set_params(
        FakeConvProtoLayer(FakeMultiParam(params)).convolution_param,
        'Conv2D')
    for key in exp_res.keys():
        # Array-valued attributes need elementwise comparison.
        if key in ('padding', 'stride', 'kernel', 'dilate'):
            np.testing.assert_equal(res[key], exp_res[key])
        else:
            self.assertEqual(res[key], exp_res[key])
def test_build_net_not_equal_inputs(self):
    # build_net must reshape the caffe net when the graph input shape
    # ([1, 3, 227, 227]) differs from the net blob shape ([1, 3, 112, 112]),
    # then run a forward pass and attach the net to the graph.
    # The module-level `my_mock_net` global is what the patched Net factory
    # hands back to build_net, so the call assertions below inspect it.
    global my_mock_net
    input_node_param = {
        'shape': np.array([1, 3, 112, 112]),
        'reshape': MagicMock(return_value=134)
    }
    my_blobs = {
        'node_1': FakeMultiParam(input_node_param),
    }
    my_mock_net = Net(my_blobs)
    graph = build_graph(self.nodes_attributes,
                        [('node_1', 'node_3'),
                         ('node_3', 'node_4'),
                         ('node_4', 'op_output')],
                        {'node_4': {'shape': None},
                         'node_1': {'shape': np.array([1, 3, 227, 227])},
                         'node_3': {'top': 'top_node'}},
                        nodes_with_edges_only=True)
    graph.proto_path = 'path_to_proto'
    # NOTE(review): caffemodel_path is set to 'path_to_proto' too —
    # presumably the value is irrelevant for this test, but confirm it
    # should not be a distinct model path.
    graph.caffemodel_path = 'path_to_proto'
    build_net(graph)
    my_mock_net.reshape.assert_called_once_with()
    my_mock_net.forward.assert_called_once_with()
    self.assertIsNotNone(graph.caffe_net)
def test_proposal_ext_scale(self):
    """ProposalPythonFrontExtractor must parse param_str and merge the
    parsed values with the extractor defaults (base_size, topn, nms)."""
    layer_params = {
        'param_str': "'feat_stride': 16, 'scale': [1,2,3], 'ratio':[5, 6,7]"
    }
    node = FakeNode(FakeProposalPythonProtoLayer(FakeMultiParam(layer_params)), None)
    ProposalPythonFrontExtractor.extract(node)
    expected = {
        'type': "Proposal",
        'feat_stride': 16,
        'base_size': 16,
        'min_size': 16,
        'ratio': [5, 6, 7],
        'scale': [1, 2, 3],
        'pre_nms_topn': 6000,
        'post_nms_topn': 300,
        'nms_thresh': 0.7,
        'infer': ProposalOp.proposal_infer
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_simplernms_ext_ideal_numbers(self, merge_attrs_mock):
    """SimplerNMSFrontExtractor must forward every merged attribute to
    the node and attach simplernms_infer."""
    layer_params = {
        'cls_threshold': 0.5,
        'max_num_proposals': 300,
        'iou_threshold': 0.7,
        'min_bbox_size': 16,
        'feat_stride': 16,
        'pre_nms_topn': 6000,
        'post_nms_topn': 150,
        'scale': [1, 2, 3]
    }
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeSimplerNMSProtoLayer(FakeMultiParam(layer_params)), None)
    SimplerNMSFrontExtractor.extract(node)
    expected = dict(layer_params, infer=SimplerNMSOp.simplernms_infer)
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_proposal_ext_ideal_numbers(self, merge_attrs):
    """ProposalFrontExtractor must copy the merged proto attributes onto
    the node, set type to Proposal and attach proposal_infer."""
    layer_params = {
        'feat_stride': 1,
        'base_size': 16,
        'min_size': 16,
        'ratio': 1,
        'scale': 2,
        'pre_nms_topn': 6000,
        'post_nms_topn': 300,
        'nms_thresh': 0.7
    }
    merge_attrs.return_value = dict(layer_params)
    node = FakeNode(FakeProposalProtoLayer(FakeMultiParam(layer_params)), None)
    ProposalFrontExtractor.extract(node)
    expected = dict(layer_params,
                    type="Proposal",
                    infer=ProposalOp.proposal_infer)
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_accum_ext(self, collect_attributes_mock):
    """AccumFrontExtractor must take collect_attributes' output as-is,
    including the have_reference value converted to int, and attach
    accum_infer."""
    layer_params = {
        'top_height': 200,
        'top_width': 300,
        'size_divisible_by': 3,
        'have_reference': 'False',
    }
    collect_attributes_mock.return_value = dict(layer_params, have_reference=0)
    node = FakeNode(FakeAccumProtoLayer(FakeMultiParam(layer_params)), None)
    AccumFrontExtractor.extract(node)
    expected = {
        'type': "Accum",
        'top_height': 200,
        'top_width': 300,
        'size_divisible_by': 3,
        'have_reference': 0,
        'infer': AccumOp.accum_infer
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_pooling_ext(self):
    """PoolingFrontExtractor must expand scalar kernel/stride/pad into
    4-D NCHW-layout arrays and map pool=1 to average pooling."""
    layer_params = {
        'kernel_size': 1,
        'stride': 2,
        'pad': 3,
        'pool': 1,
        'global_pooling': False,
        'ceil_mode': 0
    }
    node = PB({'pb': FakeProtoLayer(FakeMultiParam(layer_params))})
    PoolingFrontExtractor.extract(node)
    expected = {
        'window': np.array([1, 1, 1, 1], dtype=np.int64),
        'stride': np.array([1, 1, 2, 2], dtype=np.int64),
        'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]], dtype=np.int64),
        'pad_spatial_shape': np.array([[3, 3], [3, 3]], dtype=np.int64),
        'pool_method': 'avg',
        'exclude_pad': False,
        'infer': Pooling.infer,
        'global_pool': False,
        'output_spatial_shape': None,
        'pooling_convention': 'valid'
    }
    expected.update(layout_attrs())
    # Array attributes need elementwise comparison.
    array_attrs = ('window', 'stride', 'pad', 'pad_spatial_shape',
                   'spatial_dims', 'batch_dims', 'channel_dims')
    for attr, value in expected.items():
        if attr in array_attrs:
            np.testing.assert_array_equal(node[attr], value)
        else:
            self.assertEqual(node[attr], value)
def test_conv_ext_ideal_numbers(self, weights_biases_mock, layout_attrs_mock):
    """ConvFrontExtractor must expand scalar pad/stride/dilation/kernel
    into NCHW-layout arrays and invoke the weights/biases and layout
    attribute helpers."""
    weights_biases_mock.return_value = {}
    layout_attrs_mock.return_value = {}
    layer_params = {
        'pad': 10,
        'kernel_size': 11,
        'stride': 12,
        'dilation': 13,
        'group': 14,
        'num_output': 15,
        'bias_term': True
    }
    node = PB({'pb': FakeConvProtoLayer(FakeMultiParam(layer_params))})
    ConvFrontExtractor.extract(node)
    expected = {
        'op': 'Conv2D',
        'pad': np.array([[0, 0], [0, 0], [10, 10], [10, 10]]),
        'pad_spatial_shape': np.array([[10, 10], [10, 10]]),
        'stride': np.array([1, 1, 12, 12]),
        'kernel_spatial': np.array([11, 11]),
        'dilation': np.array([1, 1, 13, 13]),
        'group': 14,
        'bias_addable': True,
        'bias_term': True,
    }
    self.assertTrue(weights_biases_mock.called)
    self.assertTrue(layout_attrs_mock.called)
    array_attrs = ('pad', 'pad_spatial_shape', 'stride',
                   'kernel_spatial', 'dilation')
    for attr, value in expected.items():
        if attr in array_attrs:
            np.testing.assert_equal(node[attr], value)
        else:
            self.assertEqual(node[attr], value)
def test_python_extractor_for_extractors(self):
    """A 'module.layer' extractor class registered with
    CaffePythonFrontExtractorOp must be picked up by
    PythonFrontExtractorOp.extract for a python proto layer."""
    module = 'test_module'
    layer = 'test_layer'
    registry_key = '{}.{}'.format(module, layer)
    CaffePythonFrontExtractorOp.registered_ops[registry_key] = FakePythonExtractor
    layer_params = FakeMultiParam({
        'module': module,
        'layer': layer,
        'param_str': "'feat_stride': 16"
    })
    node = FakeNode(FakePythonProtoLayer(layer_params), None)
    self.assertTrue(PythonFrontExtractorOp.extract(node))
def test_pooling_ext_exception(self):
    """An unsupported pool method id (pool=3) must raise ValueError."""
    layer_params = {
        'kernel_size': 1,
        'stride': 2,
        'pad': 3,
        'pool': 3,
        'global_pooling': True
    }
    node = PB({'pb': FakeProtoLayer(FakeMultiParam(layer_params))})
    self.assertRaises(ValueError, PoolingFrontExtractor.extract, node)
def test_bias(self, embed_input_mock):
    """BiasToAdd must turn a caffe Bias layer into an Add node, keeping
    the axis attribute; the model-layer blobs go through embed_input
    (mocked here)."""
    embed_input_mock.return_value = {}
    layer_params = {'axis': 1}
    node = FakeNode(FakeBiasProtoLayer(FakeMultiParam(layer_params)),
                    FakeModelLayer([1, 2, 3, 4, 5]))
    BiasToAdd.extract(node)
    expected = {'type': "Add", 'axis': 1}
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_python_extractor_for_op(self):
    """A registered callable extractor must receive the node, and
    PythonFrontExtractorOp.extract must return its result — here the
    dict parsed from param_str."""
    module = 'test_module'
    layer = 'test_layer'
    registry_key = '{}.{}'.format(module, layer)
    CaffePythonFrontExtractorOp.registered_ops[registry_key] = \
        lambda node: CaffePythonFrontExtractorOp.parse_param_str(node.pb.python_param.param_str)
    layer_params = FakeMultiParam({
        'module': module,
        'layer': layer,
        'param_str': "'feat_stride': 16"
    })
    result = PythonFrontExtractorOp.extract(
        FakeNode(FakePythonProtoLayer(layer_params), None))
    self.assertEqual({'feat_stride': 16}, result)
def test_da_ext_ideal_numbers(self, merge_attrs_mock):
    """DataAugmentationFrontExtractor must copy the proto attributes to
    the node with booleans converted to ints and attach
    data_augmentation_infer; the extra keys returned by the mocked
    merge_attrs are not checked."""
    layer_params = {
        'crop_width': 0,
        'crop_height': 0,
        'write_augmented': "",
        'max_multiplier': 255.0,
        'augment_during_test': True,
        'recompute_mean': 0,
        'write_mean': "",
        'mean_per_pixel': False,
        'mean': 0,
        'mode': "add",
        'bottomwidth': 0,
        'bottomheight': 0,
        'num': 0,
        'chromatic_eigvec': [0.0]
    }
    merge_attrs_mock.return_value = dict(layer_params, test=54, test2='test3')
    node = FakeNode(FakeDAProtoLayer(FakeMultiParam(layer_params)), None)
    DataAugmentationFrontExtractor.extract(node)
    expected = dict(layer_params,
                    type='DataAugmentation',
                    op='DataAugmentation',
                    augment_during_test=1,
                    mean_per_pixel=0,
                    infer=DataAugmentationOp.data_augmentation_infer)
    for attr, value in expected.items():
        if attr == 'chromatic_eigvec':
            # List-valued attribute: compare elementwise.
            np.testing.assert_equal(value, node[attr])
        else:
            self.assertEqual(value, node[attr])
def test_grn_ext_ideal_numbers(self, merge_attrs_mock):
    """GRNFrontExtractor must set the GRN type, keep the bias attribute
    and attach the shape-preserving infer function."""
    layer_params = {'bias': 0.7}
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeGRNProtoLayer(FakeMultiParam(layer_params)), None)
    GRNFrontExtractor.extract(node)
    expected = {'type': "GRN", 'bias': 0.7, 'infer': copy_shape_infer}
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_elu_ext(self, collect_attrs_mock):
    """ELUFrontExtractor must keep the alpha attribute and tag the node
    as Elu; the extra mocked attributes are not checked."""
    layer_params = {'alpha': 4}
    collect_attrs_mock.return_value = dict(layer_params, test=54, test2='test3')
    node = FakeNode(FakeProtoLayer(FakeMultiParam(layer_params)), None)
    ELUFrontExtractor.extract(node)
    expected = {'type': 'Elu', 'alpha': 4}
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_ctcgreedydecoder_ext_ideal_numbers(self, merge_attrs_mock):
    """CTCGreedyDecoderFrontExtractor must convert ctc_merge_repeated
    to int and attach the decoder infer function."""
    layer_params = {'ctc_merge_repeated': True}
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeCTCGreedyDecoderProtoLayer(FakeMultiParam(layer_params)), None)
    CTCGreedyDecoderFrontExtractor.extract(node)
    expected = {
        'type': "CTCGreedyDecoder",
        'ctc_merge_repeated': 1,
        'infer': CTCGreedyDecoderOp.infer
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_elu_ext_ideal_numbers(self, merge_attrs_mock):
    """ReorgYoloFrontExtractor must keep the stride attribute and attach
    reorgyolo_infer.

    NOTE(review): misnamed — this exercises ReorgYolo, not ELU.
    """
    layer_params = {'stride': 2}
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeReorgYoloProtoLayer(FakeMultiParam(layer_params)), None)
    ReorgYoloFrontExtractor.extract(node)
    expected = {
        'type': "ReorgYolo",
        'stride': 2,
        'infer': ReorgYoloOp.reorgyolo_infer
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_normalize_ext_ideal_numbers(self, collect_attributes_mock):
    """NormalizeFrontExtractor must propagate across_spatial,
    channel_shared and eps unchanged and set the Normalize type."""
    layer_params = {'across_spatial': 1, 'channel_shared': 0, 'eps': 0.00001}
    collect_attributes_mock.return_value = dict(layer_params)
    node = FakeNode(FakeNormalizeProtoLayer(FakeMultiParam(layer_params)), None)
    NormalizeFrontExtractor.extract(node)
    expected = dict(layer_params, type="Normalize")
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_priorboxclustered_ext_ideal_numbers(self, merge_attrs_mock):
    """PriorBoxClusteredFrontExtractor must keep string-valued proto
    fields verbatim and convert the clip/flip booleans to ints."""
    layer_params = {
        'width': '30.0',
        'height': '60.0',
        'clip': False,
        'flip': True,
        'variance': np.array(['0.2', '0.3', '0.2', '0.3']),
        'img_size': '300',
        'img_h': '0',
        'img_w': '0',
        'step': '0,5',
        'step_h': '0',
        'step_w': '0',
        'offset': '0.6'
    }
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakePriorBoxClusteredProtoLayer(FakeMultiParam(layer_params)), None)
    PriorBoxClusteredFrontExtractor.extract(node)
    expected = dict(layer_params,
                    op='PriorBoxClustered',
                    type='PriorBoxClustered',
                    clip=0,
                    flip=1)
    for attr, value in expected.items():
        if attr in ['width', 'height', 'variance']:
            # Possibly array-valued: compare elementwise.
            np.testing.assert_equal(node[attr], value)
        else:
            self.assertEqual(node[attr], value)
def test_reogyolo_ext_ideal_numbers(self, merge_attrs_mock):
    """PreluFrontExtractor must convert channel_shared to int and attach
    PReLU.infer.

    NOTE(review): misnamed — this exercises PReLU, not ReorgYolo.
    """
    layer_params = {'channel_shared': False}
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakePReLUProtoLayer(FakeMultiParam(layer_params)), None)
    PreluFrontExtractor.extract(node)
    expected = {
        'type': 'PReLU',
        'op': 'PReLU',
        'channel_shared': 0,
        'infer': PReLU.infer,
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_argmax_ext_ideal_numbers(self, merge_attrs_mock):
    """ArgMaxFrontExtractor must keep out_max_val/top_k/axis, attach
    arg_ops_infer and request removal of the values output."""
    layer_params = {'out_max_val': True, 'top_k': 100, 'axis': 2}
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeArgMaxProtoLayer(FakeMultiParam(layer_params)), None)
    ArgMaxFrontExtractor.extract(node)
    expected = dict(layer_params,
                    infer=arg_ops_infer,
                    remove_values_output=True)
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_mvn_ext_ideal_numbers(self, collect_attributes_mock):
    """PowerFileFrontExtractor must take the collected shift_file value
    and attach the shape-preserving infer function.

    NOTE(review): misnamed — this exercises PowerFile, not MVN; the
    MVN-style params dict below is unrelated to the mocked return value.
    """
    layer_params = {
        'normalize_variance': 'True',
        'across_channels': 'False',
        'eps': 1e-9
    }
    collect_attributes_mock.return_value = {'shift_file': 'some_file_path'}
    node = FakeNode(FakePowerFileProtoLayer(FakeMultiParam(layer_params)), None)
    PowerFileFrontExtractor.extract(node)
    expected = {
        'type': "PowerFile",
        'shift_file': 'some_file_path',
        'infer': copy_shape_infer
    }
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_conv_ext_empty_numbers(self, weights_biases_mock, layout_attrs_mock):
    """DeconvFrontExtractor must build pad/stride/kernel arrays from the
    per-dimension fields (pad_w/pad_h, kernel_w/kernel_h,
    stride_w/stride_h) when the aggregate fields are None, and default
    dilation to [1, 1, 1, 1].

    Fix: the original bound DeconvFrontExtractor.extract's return value
    to `res` and immediately overwrote it with `node` — the dead store
    is removed.
    """
    weights_biases_mock.return_value = {}
    layout_attrs_mock.return_value = {}
    params = {
        'pad': None,
        'kernel_size': None,
        'stride': None,
        'dilation': None,
        'group': 14,
        'num_output': 15,
        'bias_term': True,
        'pad_w': 3,
        'pad_h': 4,
        'kernel_w': 5,
        'kernel_h': 6,
        'stride_h': 3,
        'stride_w': 2,
    }
    node = PB({'pb': FakeConvProtoLayer(FakeMultiParam(params))})
    DeconvFrontExtractor.extract(node)
    # The extractor mutates the node in place; check attributes on it.
    res = node
    exp_res = {
        'op': 'Deconv2D',
        'pad': np.array([[0, 0], [0, 0], [4, 4], [3, 3]]),
        'pad_spatial_shape': np.array([[4, 4], [3, 3]]),
        'stride': np.array([1, 1, 3, 2]),
        'kernel_spatial': np.array([6, 5]),
        'dilation': np.array([1, 1, 1, 1]),
        'group': 14,
        'bias_addable': True,
    }
    self.assertTrue(weights_biases_mock.called)
    self.assertTrue(layout_attrs_mock.called)
    for key in exp_res.keys():
        # Array attributes compared elementwise.
        if key in ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial', 'dilation'):
            np.testing.assert_equal(res[key], exp_res[key])
        else:
            self.assertEqual(res[key], exp_res[key])
def test_st_ext_ideal_numbers(self, merge_attrs_mock):
    """SpatialTransformFrontExtractor must copy all transform/theta
    attributes, converting to_compute_dU to int, and attach sp_infer."""
    layer_params = {
        'transform_type': "ffff",
        'sampler_type': "gggg",
        'output_H': 56,
        'output_W': 78,
        'to_compute_dU': True,
        'theta_1_1': 0.1,
        'theta_1_2': 0.2,
        'theta_1_3': 0.3,
        'theta_2_1': 0.4,
        'theta_2_2': 0.5,
        'theta_2_3': 0.6
    }
    merge_attrs_mock.return_value = dict(layer_params)
    node = FakeNode(FakeSpatialTransformProtoLayer(FakeMultiParam(layer_params)), None)
    SpatialTransformFrontExtractor.extract(node)
    expected = dict(layer_params,
                    type="SpatialTransformer",
                    to_compute_dU=1,
                    infer=SpatialTransformOp.sp_infer)
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_resample_ext_ideal_numbers(self, merge_attrs_mock):
    """CorrelationFrontExtractor must pass the correlation attributes
    through unchanged and attach corr_infer; the extra mocked keys are
    not checked.

    NOTE(review): misnamed — this exercises Correlation, not Resample.
    """
    layer_params = {
        'pad': 20,
        'kernel_size': 1,
        'max_displacement': 20,
        'stride_1': 1,
        'stride_2': 2,
        'single_direction': 0,
        'do_abs': False,
        'correlation_type': 'caffe.CorrelationParameter.MULTIPLY'
    }
    merge_attrs_mock.return_value = dict(layer_params, test=54, test2='test3')
    node = FakeNode(FakeCorrProtoLayer(FakeMultiParam(layer_params)), None)
    CorrelationFrontExtractor.extract(node)
    expected = dict(layer_params,
                    type="Correlation",
                    infer=CorrelationOp.corr_infer)
    for attr, value in expected.items():
        self.assertEqual(node[attr], value)
def test_crop_ext(self, collect_attributes_mock):
    """CropFrontExtractor must keep axis/offset, leave dim unset (it is
    filled in during shape inference) and attach crop_infer."""
    layer_params = {
        'axis': 0,
        'offset': 0,
    }
    collect_attributes_mock.return_value = dict(layer_params, test=54, test2='test3')
    node = FakeNode(FakeCropProtoLayer(FakeMultiParam(layer_params)), None)
    CropFrontExtractor.extract(node)
    expected = {
        'type': 'Crop',
        'axis': 0,
        'offset': 0,
        'dim': None,  # set in infer
        'infer': crop_infer
    }
    for attr, value in expected.items():
        self.assertEqual(value, node[attr])
def test_get_attrs(self):
    """get_attrs must parse a python layer param_str into a dict with
    typed values (ints stay ints, quoted strings stay strings)."""
    param_str = "'test_attr_1': 12, 'test_attr_2': 'sdf sdf'"
    attrs = CaffePythonFrontExtractorOp.get_attrs(
        FakePythonParam(FakeMultiParam({'param_str': param_str})))
    self.assertEqual({"test_attr_1": 12, "test_attr_2": "sdf sdf"}, attrs)