# Example 1
    def test_reogyolo_ext_ideal_numbers(self, merge_attrs_mock):
        """RegionYolo extractor should copy merged region/flatten attrs onto
        the node and attach the regionyolo_infer function."""
        region_params = {
            'coords': 4,
            'classes': 20,
            'num': 5,
            'do_softmax': 1,
            'anchors': 5,
            'mask': 5,
        }
        flatten_params = {'axis': 1, 'end_axis': -1}
        merge_attrs_mock.return_value = {**region_params, **flatten_params}

        proto_layer = FakeRegionYoloProtoLayer(FakeMultiParam(region_params),
                                               FakeMultiParam(flatten_params))
        node = FakeNode(proto_layer, None)

        RegionYoloFrontExtractor.extract(node)

        expected = dict(region_params, **flatten_params)
        expected.update({'type': "RegionYolo",
                         'infer': RegionYoloOp.regionyolo_infer})

        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 2
    def test_check_phase_no_phase(self):
        """check_phase returns an empty dict when 'include' carries no phase."""
        empty_phase = FakeMultiParam({})
        include_param = {'include': [empty_phase]}

        graph = build_graph(nodes_attributes, [('node_1', 'node_2')],
                            {'node_1': {'pb': FakeMultiParam(include_param)}})

        self.assertEqual(check_phase(Node(graph, 'node_1')), {})
# Example 3
    def test_argmax_ext_ideal_numbers(self, merge_attrs_mock):
        """ArgMax extractor should pass attrs through and add infer + the
        remove_values_output flag."""
        argmax_params = {'out_max_val': True, 'top_k': 100, 'axis': 2}
        merge_attrs_mock.return_value = dict(argmax_params)

        node = FakeNode(FakeArgMaxProtoLayer(FakeMultiParam(argmax_params)),
                        None)
        ArgMaxFrontExtractor.extract(node)

        expected = dict(argmax_params,
                        infer=ArgMaxOp.argmax_infer,
                        remove_values_output=True)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
    def test_crop_ext(self, collect_attributes_mock):
        """Crop extractor should set type, a dim placeholder and crop_infer,
        ignoring unrelated collected attrs."""
        crop_params = {'axis': 0, 'offset': 0}
        collect_attributes_mock.return_value = dict(crop_params,
                                                    test=54,
                                                    test2='test3')
        node = FakeNode(FakeCropProtoLayer(FakeMultiParam(crop_params)), None)

        CropFrontExtractor.extract(node)

        expected = {
            'type': 'Crop',
            'axis': 0,
            'offset': 0,
            'dim': None,  # filled in during shape inference
            'infer': crop_infer,
        }
        for attr, value in expected.items():
            self.assertEqual(value, node[attr])
# Example 5
 def test_pooling_ext(self):
     """Caffe pooling extractor should expand scalar params into 4D layout
     attributes (window/stride/pad) and pick the 'avg' pool method."""
     pool_params = {
         'kernel_size': 1,
         'stride': 2,
         'pad': 3,
         'pool': 1,
         'global_pooling': 0,
         'ceil_mode': 0,
     }
     node = PB({'pb': FakeProtoLayer(FakeMultiParam(pool_params))})
     PoolingFrontExtractor.extract(node)
     expected = {
         'window': np.array([1, 1, 1, 1], dtype=np.int64),
         'stride': np.array([1, 1, 2, 2], dtype=np.int64),
         'pad': np.array([[0, 0], [0, 0], [3, 3], [3, 3]], dtype=np.int64),
         'pad_spatial_shape': np.array([[3, 3], [3, 3]], dtype=np.int64),
         'pool_method': 'avg',
         'exclude_pad': 'false',
         'infer': Pooling.infer,
         'global_pool': 0,
         'output_spatial_shape': None,
         'pooling_convention': 'valid',
     }
     expected.update(layout_attrs())
     array_attrs = ('window', 'stride', 'pad', 'pad_spatial_shape',
                    'spatial_dims', 'batch_dims', 'channel_dims')
     for attr, value in expected.items():
         if attr in array_attrs:
             np.testing.assert_array_equal(node[attr], value)
         else:
             self.assertEqual(node[attr], value)
# Example 6
    def test_normalize_ext_ideal_numbers(self, collect_attributes_mock):
        """Normalize extractor should copy collected attrs and set type/infer."""
        norm_params = {
            'across_spatial': 1,
            'channel_shared': 0,
            'eps': 0.00001,
        }
        collect_attributes_mock.return_value = dict(norm_params)

        node = FakeNode(FakeNormalizeProtoLayer(FakeMultiParam(norm_params)),
                        None)
        NormalizeFrontExtractor.extract(node)

        expected = dict(norm_params, type="Normalize", infer=copy_shape_infer)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 7
 def test_conv_ext_ideal_numbers(self, weights_biases_mock,
                                 layout_attrs_mock):
     """Conv extractor should translate scalar caffe conv params into the
     4D pad/stride/dilation attrs and invoke the mocked helpers."""
     weights_biases_mock.return_value = {}
     layout_attrs_mock.return_value = {}
     conv_params = {
         'pad': 10,
         'kernel_size': 11,
         'stride': 12,
         'dilation': 13,
         'group': 14,
         'num_output': 15,
         'bias_term': True,
     }
     node = PB({'pb': FakeConvProtoLayer(FakeMultiParam(conv_params))})
     ConvFrontExtractor.extract(node)
     expected = {
         'op': 'Conv2D',
         'pad': np.array([[0, 0], [0, 0], [10, 10], [10, 10]]),
         'pad_spatial_shape': np.array([[10, 10], [10, 10]]),
         'stride': np.array([1, 1, 12, 12]),
         'kernel_spatial': np.array([11, 11]),
         'dilation': np.array([1, 1, 13, 13]),
         'group': 14,
         'bias_addable': True,
         'bias_term': True,
     }
     self.assertTrue(weights_biases_mock.called)
     self.assertTrue(layout_attrs_mock.called)
     array_attrs = ('pad', 'pad_spatial_shape', 'stride', 'kernel_spatial',
                    'dilation')
     for attr, value in expected.items():
         if attr in array_attrs:
             np.testing.assert_equal(node[attr], value)
         else:
             self.assertEqual(node[attr], value)
# Example 8
    def test_proposal_ext_scale(self):
        """Python Proposal extractor should parse feat_stride plus the
        scale/ratio lists out of param_str and fill remaining defaults."""
        proto_params = {
            'param_str':
            "'feat_stride': 16, 'scale': [1,2,3], 'ratio':[5, 6,7]"
        }
        proto_layer = FakeProposalPythonProtoLayer(FakeMultiParam(proto_params))
        node = FakeNode(proto_layer, None)

        ProposalPythonFrontExtractor.extract(node)

        expected = {
            'type': "Proposal",
            'feat_stride': 16,
            'base_size': 16,
            'min_size': 16,
            'ratio': [5, 6, 7],
            'scale': [1, 2, 3],
            'pre_nms_topn': 6000,
            'post_nms_topn': 300,
            'nms_thresh': 0.7,
            'infer': ProposalOp.proposal_infer,
        }
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 9
    def test_params_creation(self):
        """conv_set_params should prefer the *_w/*_h params over the generic
        (None) ones and fill defaults (e.g. dilate) for absent params.

        Fix: the array-attr membership tuple listed 'stride' twice; the
        duplicate is removed (membership semantics are unchanged).
        """
        conv_params = {
            'pad': None,
            'kernel_size': None,
            'stride': None,
            'dilation': None,
            'group': 14,
            'num_output': 15,
            'bias_term': True,
            'pad_w': 3,
            'pad_h': 4,
            'kernel_w': 5,
            'kernel_h': 6,
            'stride_h': 3,
            'stride_w': 2,
        }
        exp_res = {
            'padding': [3, 4],
            'stride': [2, 3],
            'kernel': [5, 6],
            'dilate': [1, 1],
            'group': 14,
            'output': 15
        }
        res = conv_set_params(
            FakeConvProtoLayer(FakeMultiParam(conv_params)).convolution_param,
            'Conv2D')

        for key in exp_res.keys():
            # List-valued attrs compare element-wise via numpy.
            if key in ('padding', 'stride', 'kernel', 'dilate'):
                np.testing.assert_equal(res[key], exp_res[key])
            else:
                self.assertEqual(res[key], exp_res[key])
# Example 10
 def test_inner_product_ext(self):
     """InnerProduct extractor should embed the model blobs as weights and
     biases and set the FullyConnected attrs."""
     ip_params = {'num_output': 10, 'bias_term': True}
     weights = np.array([1., 2.])
     biases = np.array([3., 4.])
     res = inner_product_ext(FakeProtoLayer(FakeMultiParam(ip_params)),
                             FakeModelLayer([weights, biases]))
     expected = {
         'type': 'FullyConnected',
         'out-size': 10,
         'infer': caffe_inner_product,
         'weights': weights,
         'biases': biases,
         'embedded_inputs': [
             (1, 'weights', {'bin': 'weights'}),
             (2, 'biases', {'bin': 'biases'}),
         ],
     }
     for attr, value in expected.items():
         if attr in ('weights', 'biases'):
             np.testing.assert_array_equal(res[attr], value)
         else:
             self.assertEqual(res[attr], value)
# Example 11
    def test_accum_ext(self, collect_attributes_mock):
        """Accum extractor should use the normalized have_reference value and
        set type/infer on the node."""
        accum_params = {
            'top_height': 200,
            'top_width': 300,
            'size_divisible_by': 3,
            'have_reference': 'False',
        }
        collect_attributes_mock.return_value = dict(accum_params,
                                                    have_reference=0)

        node = FakeNode(FakeAccumProtoLayer(FakeMultiParam(accum_params)),
                        None)
        AccumFrontExtractor.extract(node)

        expected = dict(accum_params,
                        have_reference=0,
                        type="Accum",
                        infer=AccumOp.accum_infer)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 12
    def test_proposal_ext_ideal_numbers(self, merge_attrs):
        """Proposal extractor should copy all merged attrs and attach the
        proposal_infer function."""
        proposal_params = {
            'feat_stride': 1,
            'base_size': 16,
            'min_size': 16,
            'ratio': 1,
            'scale': 2,
            'pre_nms_topn': 6000,
            'post_nms_topn': 300,
            'nms_thresh': 0.7,
        }
        merge_attrs.return_value = dict(proposal_params)

        node = FakeNode(FakeProposalProtoLayer(FakeMultiParam(proposal_params)),
                        None)
        ProposalFrontExtractor.extract(node)

        expected = dict(proposal_params,
                        type="Proposal",
                        infer=ProposalOp.proposal_infer)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 13
 def test_build_net_not_equal_inputs(self):
     """build_net should reshape and re-run the caffe net when the graph
     input shape differs from the blob shape, then attach it to the graph."""
     global my_mock_net
     input_node_param = {
         'shape': np.array([1, 3, 112, 112]),
         'reshape': MagicMock(return_value=134)
     }
     my_mock_net = Net({'node_1': FakeMultiParam(input_node_param)})
     edges = [('node_1', 'node_3'),
              ('node_3', 'node_4'),
              ('node_4', 'op_output')]
     update_attrs = {
         'node_4': {'shape': None},
         'node_1': {'shape': np.array([1, 3, 227, 227])},
         'node_3': {'top': 'top_node'},
     }
     graph = build_graph(self.nodes_attributes, edges, update_attrs,
                         nodes_with_edges_only=True)
     graph.proto_path = 'path_to_proto'
     graph.caffemodel_path = 'path_to_proto'
     build_net(graph)
     my_mock_net.reshape.assert_called_once_with()
     my_mock_net.forward.assert_called_once_with()
     self.assertIsNotNone(graph.caffe_net)
# Example 14
    def test_interp_ext_ideal_numbers(self, merge_attrs_mock):
        """Interp extractor should copy merged attrs (ignoring extras) and
        attach interp_infer."""
        interp_params = {
            'height': 1.1,
            'width': 2.2,
            'zoom_factor': 3.3,
            'shrink_factor': 4.4,
            'pad_beg': 5.5,
            'pad_end': 6.6,
        }
        merge_attrs_mock.return_value = dict(interp_params,
                                             test=54,
                                             test2='test3')

        node = FakeNode(FakeInterpProtoLayer(FakeMultiParam(interp_params)),
                        None)
        InterpFrontExtractor.extract(node)

        expected = dict(interp_params,
                        type="Interp",
                        infer=InterpOp.interp_infer)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 15
    def test_create_default_weights(self):
        """
        There are situations when a scale layer doesn't have weights and
        biases. This test checks that if they are not available in the
        caffemodel file then default values [1] and [0] are generated.
        """
        default_weights = np.array([1])
        default_biases = np.array([0])
        scale_params = {'type': 'Scale', 'axis': 0, 'bias_term': True}

        res = scale_ext(FakeProtoLayer(FakeMultiParam(scale_params)), None)
        expected = {
            'op': 'ScaleShift',
            'type': 'ScaleShift',
            'axis': 0,
            'infer': copy_shape_infer,
            'weights': default_weights,
            'biases': default_biases,
            'embedded_inputs': [
                (1, 'weights', {'bin': 'weights'}),
                (2, 'biases', {'bin': 'biases'}),
            ],
        }
        self.assertDictEqual(expected, res)
# Example 16
    def test_scale_ext(self):
        """Scale extractor should embed the model blobs as weights/biases."""
        weights = np.array([1., 2.])
        biases = np.array([3., 4.])
        scale_params = {'type': 'Scale', 'axis': 0, 'bias_term': True}

        res = scale_ext(FakeProtoLayer(FakeMultiParam(scale_params)),
                        FakeModelLayer([weights, biases]))
        expected = {
            'op': 'ScaleShift',
            'type': 'ScaleShift',
            'axis': 0,
            'infer': copy_shape_infer,
            'weights': weights,
            'biases': biases,
            'embedded_inputs': [
                (1, 'weights', {'bin': 'weights'}),
                (2, 'biases', {'bin': 'biases'}),
            ],
        }
        for attr, value in expected.items():
            if attr in ('weights', 'biases'):
                np.testing.assert_array_equal(res[attr], value)
            else:
                self.assertEqual(res[attr], value)
# Example 17
    def test_simplernms_ext_ideal_numbers(self, merge_attrs_mock):
        """SimplerNMS extractor should copy all attrs and attach
        simplernms_infer."""
        nms_params = {
            'cls_threshold': 0.5,
            'max_num_proposals': 300,
            'iou_threshold': 0.7,
            'min_bbox_size': 16,
            'feat_stride': 16,
            'pre_nms_topn': 6000,
            'post_nms_topn': 150,
            'scale': [1, 2, 3],
        }
        merge_attrs_mock.return_value = dict(nms_params)

        node = FakeNode(FakeSimplerNMSProtoLayer(FakeMultiParam(nms_params)),
                        None)
        SimplerNMSFrontExtractor.extract(node)

        expected = dict(nms_params, infer=SimplerNMSOp.simplernms_infer)
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 18
    def test_resample_ext_ideal_numbers(self, merge_attrs_mock):
        """Resample extractor should rename type -> resample_type and coerce
        the antialias flag to an int."""
        resample_params = {
            'antialias': True,
            'height': 384,
            'width': 512,
            'type': 2,
            'factor': 1.0,
        }
        merge_attrs_mock.return_value = {
            'antialias': True,
            'height': 384,
            'width': 512,
            'type': 'caffe.ResampleParameter.LINEAR',
            'factor': 1.0,
        }
        node = FakeNode(FakeResampleProtoLayer(FakeMultiParam(resample_params)),
                        None)

        ResampleFrontExtractor.extract(node)

        expected = {
            'op': 'Resample',
            'antialias': 1,
            'height': 384,
            'width': 512,
            'resample_type': 'caffe.ResampleParameter.LINEAR',
            'factor': 1.0,
            'infer': ResampleOp.resample_infer,
        }
        for attr, value in expected.items():
            self.assertEqual(value, node[attr])
# Example 19
 def test_pooling_ext_exception(self):
     """An unsupported pool method (pool=3) must make the extractor raise."""
     bad_params = {
         'kernel_size': 1,
         'stride': 2,
         'pad': 3,
         'pool': 3,
         'global_pooling': 1,
     }
     node = PB({'pb': FakeProtoLayer(FakeMultiParam(bad_params))})
     self.assertRaises(ValueError, PoolingFrontExtractor.extract, node)
# Example 20
 def test_python_extractor_for_extractors(self):
     """A registered extractor class must be found by its 'module.layer' key."""
     module = 'test_module'
     layer = 'test_layer'
     registry_key = '{}.{}'.format(module, layer)
     CaffePythonFrontExtractorOp.registered_ops[registry_key] = FakePythonExtractor
     proto_params = FakeMultiParam({
         'module': module,
         'layer': layer,
         'param_str': "'feat_stride': 16"
     })
     node = FakeNode(FakePythonProtoLayer(proto_params), None)
     self.assertTrue(PythonFrontExtractorOp.extract(node))
# Example 21
    def test_bias(self, embed_input_mock):
        """A Bias layer should be converted to an Add node keeping its axis."""
        embed_input_mock.return_value = {}
        bias_params = {'axis': 1}
        node = FakeNode(FakeBiasProtoLayer(FakeMultiParam(bias_params)),
                        FakeModelLayer([1, 2, 3, 4, 5]))
        BiasToAdd.extract(node)

        expected = {'type': "Add", 'axis': 1}
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 22
 def test_python_extractor_for_op(self):
     """A registered callable extractor should parse param_str into a dict."""
     module = 'test_module'
     layer = 'test_layer'
     registry_key = '{}.{}'.format(module, layer)
     CaffePythonFrontExtractorOp.registered_ops[registry_key] = \
         lambda node: CaffePythonFrontExtractorOp.parse_param_str(node.pb.python_param.param_str)
     proto_params = FakeMultiParam({
         'module': module,
         'layer': layer,
         'param_str': "'feat_stride': 16"
     })
     node = FakeNode(FakePythonProtoLayer(proto_params), None)
     ext = PythonFrontExtractorOp.extract(node)
     self.assertEqual({'feat_stride': 16}, ext)
# Example 23
    def test_scale_2inputs_ext(self):
        """Scale with two inputs and no bias should not embed any weights."""
        scale_params = {'type': 'Scale', 'axis': 0, 'bias_term': False}

        res = scale_ext(FakeProtoLayer(FakeMultiParam(scale_params), True),
                        None)
        expected = {
            'op': 'ScaleShift',
            'type': 'ScaleShift',
            'axis': 0,
            'infer': copy_shape_infer,
        }
        for attr, value in expected.items():
            self.assertEqual(res[attr], value)
# Example 24
    def test_da_ext_ideal_numbers(self, merge_attrs_mock):
        """DataAugmentation extractor should copy attrs, coerce boolean flags
        to ints, and attach data_augmentation_infer."""
        da_params = {
            'crop_width': 0,
            'crop_height': 0,
            'write_augmented': "",
            'max_multiplier': 255.0,
            'augment_during_test': True,
            'recompute_mean': 0,
            'write_mean': "",
            'mean_per_pixel': False,
            'mean': 0,
            'mode': "add",
            'bottomwidth': 0,
            'bottomheight': 0,
            'num': 0,
            'chromatic_eigvec': [0.0],
        }
        merge_attrs_mock.return_value = dict(da_params, test=54, test2='test3')
        node = FakeNode(FakeDAProtoLayer(FakeMultiParam(da_params)), None)

        DataAugmentationFrontExtractor.extract(node)
        expected = dict(da_params,
                        type='DataAugmentation',
                        op='DataAugmentation',
                        augment_during_test=1,
                        mean_per_pixel=0,
                        infer=DataAugmentationOp.data_augmentation_infer)

        for attr, value in expected.items():
            if attr == 'chromatic_eigvec':
                np.testing.assert_equal(value, node[attr])
            else:
                self.assertEqual(value, node[attr])
# Example 25
 def test_lrn_ext_norm_reg(self):
     """norm_region=1 should map LRN to the 'same' region normalization."""
     lrn_params = {'alpha': 10, 'beta': 15, 'local_size': 20, 'norm_region': 1}
     res = lrn_ext(FakeProtoLayer(FakeMultiParam(lrn_params)), None)
     expected = {
         'op': 'LRN',
         'type': 'Norm',
         'alpha': 10,
         'beta': 15,
         'local_size': 20,
         'region': 'same',
         'infer': copy_shape_infer,
     }
     self.assertEqual(res, expected)
# Example 26
 def test_power_ext(self):
     """Power extractor should copy power/scale/shift and set op/type."""
     power_params = {'power': 1, 'scale': 2, 'shift': 3}
     res = power_ext(FakeProtoLayer(FakeMultiParam(power_params)), None)
     expected = dict(power_params,
                     infer=copy_shape_infer,
                     op="Power",
                     type='Power',
                     output_spatial_shape=None)
     self.assertEqual(res, expected)
# Example 27
    def test_grn_ext_ideal_numbers(self, merge_attrs_mock):
        """GRN extractor should keep bias and set type plus copy_shape_infer."""
        grn_params = {'bias': 0.7}
        merge_attrs_mock.return_value = dict(grn_params)

        node = FakeNode(FakeGRNProtoLayer(FakeMultiParam(grn_params)), None)
        GRNFrontExtractor.extract(node)

        expected = {'type': "GRN", 'bias': 0.7, 'infer': copy_shape_infer}
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 28
    def test_elu_ext(self, collect_attrs_mock):
        """ELU extractor should keep alpha and set type, ignoring extra attrs."""
        elu_params = {'alpha': 4}
        collect_attrs_mock.return_value = dict(elu_params,
                                               test=54,
                                               test2='test3')

        node = FakeNode(FakeProtoLayer(FakeMultiParam(elu_params)), None)
        ELUFrontExtractor.extract(node)

        expected = {'type': 'Elu', 'alpha': 4}
        for attr, value in expected.items():
            self.assertEqual(node[attr], value)
# Example 29
    def test_permute_check_attrs(self):
        """Permute extractor should preserve the order array and set type/op."""
        permute_attrs = {'order': np.array([0, 1, 3, 2])}

        res = permute_ext(FakePermuteProtoLayer(FakeMultiParam(permute_attrs)),
                          None)
        expected = {
            'type': 'Permute',
            'op': 'Permute',
            'order': np.array([0, 1, 3, 2]),
            'infer': transpose_infer,
        }
        for attr, value in expected.items():
            if attr == 'order':
                np.testing.assert_equal(res[attr], value)
            else:
                self.assertEqual(res[attr], value)
# Example 30
    def test_relu_ext(self):
        """ReLU extractor should keep negative_slope and add layout attrs.

        NOTE(review): as in the original, every attr except negative_slope —
        including 'infer' — is compared via assert_array_equal.
        """
        relu_params = {'negative_slope': 0.1}

        res = relu_ext(FakeParam('relu_param', FakeMultiParam(relu_params)),
                       None)
        expected = {'negative_slope': 0.1, 'infer': copy_shape_infer}
        expected.update(layout_attrs())
        for attr, value in expected.items():
            if attr == 'negative_slope':
                self.assertEqual(res[attr], value)
            else:
                np.testing.assert_array_equal(res[attr], value)