Example #1
0
    def test_update_sym(self):
        """update_sym should truncate the model at the given internal symbol,
        rebuild layer_type_dict, prune arg_params to the remaining layers,
        and clear aux_params.
        """
        mod = mx.module.Module.load('tests/data/testnetv1',
                                    0,
                                    label_names=['softmaxoutput1_label'])
        mh = model_handler.ModelHandler(mod, mx.context.cpu, 1)
        symbol = mh.symbol.get_internals()[self.act1_id]
        # Seed junk aux_params on the handler under test so the assertion
        # below proves update_sym actually clears them.  (Previously this
        # was set on self.mh — a different handler — leaving the
        # `mh.aux_params == {}` check vacuous.)
        mh.aux_params = {1: 2, 2: 3, 4: 5, 7: 3, 6: 8, 3: 5, 12: 5}

        mh.update_sym(symbol)

        # Truncating at act1 keeps only conv1 + act1.
        assert mh.layer_type_dict == OrderedDict([('conv1', 'Convolution'),
                                                  ('act1', 'Activation')])
        assert mh.symbol.tojson() == symbol.tojson()
        self._compare_symbols(mh.symbol, symbol)
        assert sorted(mh.arg_params.keys()) == sorted(
            ['conv1_weight', 'conv1_bias'])
        assert mh.aux_params == {}

        mod = mx.module.Module.load('tests/data/testnetv1',
                                    0,
                                    label_names=['softmaxoutput1_label'])
        mh = model_handler.ModelHandler(mod, mx.context.cpu, 1)
        symbol = mh.symbol.get_internals()[self.conv2_id]

        mh.update_sym(symbol)

        # Truncating at conv2 keeps conv1, act1 and conv2.
        assert mh.layer_type_dict == OrderedDict([('conv1', 'Convolution'),
                                                  ('act1', 'Activation'),
                                                  ('conv2', 'Convolution')])
        assert mh.symbol.tojson() == symbol.tojson()
        self._compare_symbols(mh.symbol, symbol)
        assert sorted(mh.arg_params.keys()) == sorted(
            ['conv1_weight', 'conv2_bias', 'conv1_bias', 'conv2_weight'])
        assert mh.aux_params == {}
Example #2
0
 def setUp(self):
     """Build the shared fixtures: a ModelHandler over testnetv1 and an
     ImageIter over six sample images (two per class)."""
     np.random.seed(1)
     self.data_name = 'data'
     loaded_module = mx.module.Module.load('tests/data/testnetv1',
                                           0,
                                           label_names=['softmaxoutput1_label'],
                                           data_names=(self.data_name, ))
     self.mh = model_handler.ModelHandler(loaded_module, mx.context.cpu, 1,
                                          self.data_name)
     # Two images for each of the three classes, in class order.
     self.imglist = [[label, '{}/image_{:04d}.jpg'.format(category, index)]
                     for label, category in enumerate(
                         ('accordion', 'ant', 'anchor'))
                     for index in (1, 2)]
     self.image_iter = mx.image.ImageIter(
         2, (3, 224, 224),
         imglist=self.imglist,
         path_root='tests/data/test_images',
         label_name='softmaxoutput1_label',
         data_name=self.data_name)
     # Decompose the handler's symbol JSON for structural assertions.
     self.symbol_dict = json.loads(self.mh.symbol.tojson())
     self.nodes = self.symbol_dict['nodes']
     self.arg_nodes = self.symbol_dict['arg_nodes']
     self.heads = self.symbol_dict['heads']
     # Node ids of known internal layers in testnetv1.
     self.act1_id = 4
     self.conv2_id = 7
Example #3
0
 def test_constructor_no_weight(self):
     """An unbound module (no params initialized) can still back a
     ModelHandler, which then has empty params and can add/drop layers."""
     sym, _, _ = mx.model.load_checkpoint('tests/data/testnetv1', 0)
     mod = mx.module.Module(sym, label_names=['softmaxoutput1_label'])
     mh = model_handler.ModelHandler(mod, mx.context.cpu, 1)
     # Plain dict equality: np.array_equal was previously (mis)used here,
     # which only works through 0-d object-array coercion.
     assert mh.arg_params == {}
     assert mh.aux_params == {}
     mh.drop_layer_top()
     mh.drop_layer_bottom()
Example #4
0
 def test_constructor_binded_module(self):
     """A module that is already bound (with initialized params) can back a
     ModelHandler, which can then add/drop layers."""
     loaded_module = mx.module.Module.load('tests/data/testnetv1',
                                           0,
                                           label_names=['softmaxoutput1_label'])
     data_shape = self.image_iter.provide_data[0][1]
     loaded_module.bind(data_shapes=[('data', data_shape)],
                        label_shapes=self.image_iter.provide_label)
     # Sanity-check the precondition this test is about.
     assert loaded_module.binded and loaded_module.params_initialized
     handler = model_handler.ModelHandler(loaded_module, mx.context.cpu, 1)
     handler.drop_layer_top()
     handler.drop_layer_bottom()
Example #5
0
    def test_resnet(self):
        """Assert that ModelHandler can drop layers from resnet without errors."""
        RepurposerTestUtils.download_resnet()
        resnet_module = mx.mod.Module.load('resnet-101', 0)
        handler = model_handler.ModelHandler(resnet_module)
        layers_before = handler.layer_names

        handler.drop_layer_top()
        handler.drop_layer_bottom()

        # Exactly the first and last layers should have been removed.
        removed = set(layers_before) - set(handler.layer_names)
        assert sorted(removed) == ['bn_data', 'softmax']
Example #6
0
    def _build_split_net():
        """Instantiate MH for a model that diverges into two and then joins back into one.

        Returns:
            tuple: (ModelHandler, name of the elementwise-add join layer).
        """
        data = mx.sym.var('data')
        data = mx.sym.flatten(data=data, name='flatten0')
        # Branch a: three stacked fully-connected layers.
        a1 = mx.sym.FullyConnected(data, num_hidden=5, name='a_1')
        a2 = mx.sym.FullyConnected(a1, num_hidden=5, name='a_2')
        a3 = mx.sym.FullyConnected(a2, num_hidden=5, name='a_3')
        # Branch b: two fully-connected layers off the same flattened input.
        # (Previously the local name fc2 was reused for both a_2 and b_1,
        # shadowing the earlier layer.)
        b1 = mx.sym.FullyConnected(data, num_hidden=5, name='b_1')
        b2 = mx.sym.FullyConnected(b1, num_hidden=5, name='b_2')
        # Join the branches with the + operator rather than calling
        # __add__ directly.
        plus = a3 + b2

        softmax = mx.sym.SoftmaxOutput(plus, name='softmax')
        mod = mx.mod.Module(softmax)
        mh = model_handler.ModelHandler(mod)

        # The elementwise-add join sits at index 6 of the handler's
        # topologically ordered layer names.
        plus_layer_name = mh.layer_names[6]

        return mh, plus_layer_name
Example #7
0
    def test_drop_layer_top_two_outputs(self):
        """drop_layer_top must raise ModelError when the model ends in two
        output layers, since the layer to drop is ambiguous."""
        # Shared trunk: three FC layers interleaved with ReLUs.
        data = mx.sym.Variable('data')
        fc1 = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=128)
        act1 = mx.sym.Activation(data=fc1, name='relu1', act_type="relu")
        fc2 = mx.sym.FullyConnected(data=act1, name='fc2', num_hidden=64)
        act2 = mx.sym.Activation(data=fc2, name='relu2', act_type="relu")
        trunk = mx.sym.FullyConnected(data=act2, name='fc3', num_hidden=10)

        # Two heads branch off the trunk, each with its own softmax output.
        head_one = mx.sym.SoftmaxOutput(
            data=mx.sym.FullyConnected(data=trunk, name='fc4_1', num_hidden=10),
            name='softmax1')
        head_two = mx.sym.SoftmaxOutput(
            data=mx.sym.FullyConnected(data=trunk, name='fc4_2', num_hidden=10),
            name='softmax2')

        grouped = mx.sym.Group([head_one, head_two])

        module = mx.mod.Module(grouped,
                               label_names=['softmax1_label', 'softmax2_label'])
        handler = model_handler.ModelHandler(module)

        with self.assertRaises(exceptions.ModelError):
            handler.drop_layer_top()