Example #1
    def func(pretrained=False,
             tag=None,
             root='~/.mxnet/models',
             ctx=cpu(0),
             **kwargs):
        r"""Quantized model.

        Parameters
        ----------
        pretrained : bool or str
            Boolean value controls whether to load the default pretrained weights for the model.
            String value represents the hashtag for a certain version of pretrained weights.
        tag : str, default is None
            Optional length-8 sha1sum of parameter file. If `None`, best parameter file
            will be used.
        ctx : Context, default CPU
            The context in which to load the pretrained weights.
        root : str, default $MXNET_HOME/models
            Location for keeping the model parameters.
        """
        from ..model_zoo import get_model
        from ..model_store import get_model_file
        # NOTE: `name`, `sym_prefix`, `_not_impl` and the os/warnings/mx/SymbolBlock
        # names used below are not defined in this snippet; they presumably come
        # from the enclosing module or closure in the original source.
        curr_dir = os.path.abspath(os.path.dirname(__file__))
        model_name = name.replace('mobilenet1_', 'mobilenet1.')
        model_name = model_name.replace('mobilenet0_', 'mobilenet0.')
        json_file = os.path.join(curr_dir, '{}-symbol.json'.format(model_name))
        base_name = '_'.join(model_name.split('_')[:-1])
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            param_file = get_model_file(base_name, tag=tag,
                                        root=root) if pretrained else None
            net = get_model('_'.join(model_name.split('_')[:-1]),
                            prefix=sym_prefix)
            classes = getattr(net, 'classes', [])
            sym_net = SymbolBlock.imports(json_file, ['data'], None, ctx=ctx)
            if param_file:
                # weights saved by save_parameters cannot be imported into the
                # SymbolBlock directly, so we work around it by loading them here
                # and exporting once to a temporary params file
                import tempfile
                net.load_params(param_file)
                net.hybridize()
                if '512' in base_name:
                    net(mx.nd.zeros((1, 3, 512, 512)))
                elif '300' in base_name:
                    net(mx.nd.zeros((1, 3, 300, 300)))
                else:
                    net(mx.nd.zeros((1, 3, 224, 224)))
                with tempfile.TemporaryDirectory() as tmpdirname:
                    prefix = os.path.join(tmpdirname, 'tmp')
                    net.export(prefix, epoch=0)
                    param_prefix = prefix + '-0000.params'
                    sym_net.collect_params().load(param_prefix)
        # return the quantized SymbolBlock; class metadata is carried over and
        # detector-specific methods are stubbed out
        sym_net.classes = classes
        sym_net.reset_class = _not_impl
        sym_net.set_nms = _not_impl
        return sym_net
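The comment inside the example refers to a SymbolBlock limitation: weights saved with save_parameters() cannot be loaded into a SymbolBlock directly, which is why the model is exported once to a temporary params file first. Below is a minimal standalone sketch of that round trip; the model-zoo network and the input shape are placeholders chosen for illustration, not taken from the example above.

    import os
    import tempfile
    import mxnet as mx
    from mxnet.gluon import SymbolBlock
    from mxnet.gluon.model_zoo.vision import resnet18_v1  # any HybridBlock works here

    net = resnet18_v1(pretrained=False)
    net.initialize()
    net.hybridize()
    net(mx.nd.zeros((1, 3, 224, 224)))  # run once so export() has a cached graph

    with tempfile.TemporaryDirectory() as tmpdirname:
        prefix = os.path.join(tmpdirname, 'tmp')
        net.export(prefix, epoch=0)  # writes tmp-symbol.json and tmp-0000.params
        sym_net = SymbolBlock.imports(prefix + '-symbol.json', ['data'],
                                      prefix + '-0000.params')
        print(sym_net(mx.nd.zeros((1, 3, 224, 224))).shape)  # (1, 1000)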
Example #2
    def check_amp_convert_hybrid_block():
        # Test conversion for hybrid block on CPU
        model_cpu = get_model("resnet50_v1")
        model_cpu.collect_params().initialize(ctx=mx.cpu())
        model_cpu.hybridize()
        model_cpu(mx.nd.random.uniform(0, 1, shape=(1, 3, 224, 224), ctx=mx.cpu()))
        converted_model_cpu = amp.convert_hybrid_block(model_cpu)

        # Test with real world model, default inputs for convert_hybrid_block
        model = get_model("resnet50_v1")
        model.collect_params().initialize(ctx=mx.gpu())
        model.hybridize()
        model(mx.nd.zeros((1, 3, 224, 224)))
        converted_model = amp.convert_hybrid_block(model)
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224),
                                                     dtype=np.float32))
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224),
                                                     dtype=np.float32))

        # Test with real world model, tweak inputs for convert_hybrid_block
        converted_model = amp.convert_hybrid_block(model, target_dtype="float16",
                                                   target_dtype_ops=["Convolution"])
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224),
                                                      dtype=np.float32))
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224),
                                                     dtype=np.float32))

        # Check symbolic block
        dir_path = os.path.dirname(os.path.realpath(__file__))
        model_path = os.path.join(dir_path, 'model')
        if not os.path.isdir(model_path):
            os.mkdir(model_path)
        prefix, epoch = download_model("imagenet1k-resnet-18", dst_dir=model_path)
        net = SymbolBlock.imports(os.path.join(model_path, "imagenet1k-resnet-18-symbol.json"),
                                  input_names=["data", "softmax_label"],
                                  param_file=os.path.join(model_path, "imagenet1k-resnet-18-0000.params"))
        net.collect_params().reset_ctx(ctx=mx.gpu())
        net.hybridize()
        net(mx.nd.zeros((1, 3, 224, 224)), mx.nd.zeros((1,)))
        converted_model = amp.convert_hybrid_block(net)
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224)), mx.nd.zeros((1,)))
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224)), mx.nd.zeros((1,)))

        # Check symbolic block, tweaked inputs
        converted_model = amp.convert_hybrid_block(net, target_dtype="float16", target_dtype_ops=["Convolution"])
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224)), mx.nd.zeros((1, )))
        result = converted_model.forward(mx.nd.zeros((1, 3, 224, 224)), mx.nd.zeros((1, )))
        params = converted_model.collect_params()
        assert params["stage2_unit1_conv2_weight"].dtype == np.float32

        # Pass cast_optional_params as True to convert_hybrid_block
        converted_model = amp.convert_hybrid_block(net, target_dtype="float16", target_dtype_ops=["Convolution"],
                                                   cast_optional_params=True)
        params = converted_model.collect_params()
        assert params["stage2_unit1_conv2_weight"].dtype == np.float16
Example #3
    def func(pretrained=False, tag=None, root='~/.mxnet/models', ctx=cpu(0), **kwargs):
        r"""Quantized model.

        Parameters
        ----------
        pretrained : bool or str
            Boolean value controls whether to load the default pretrained weights for the model.
            String value represents the hashtag for a certain version of pretrained weights.
        tag : str, default is None
            Optional length-8 sha1sum of parameter file. If `None`, best parameter file
            will be used.
        ctx : Context, default CPU
            The context in which to load the pretrained weights.
        root : str, default $MXNET_HOME/models
            Location for keeping the model parameters.
        """
        from ..model_zoo import get_model
        from ..model_store import get_model_file
        curr_dir = os.path.abspath(os.path.dirname(__file__))
        model_name = name.replace('mobilenet1_', 'mobilenet1.')
        model_name = model_name.replace('mobilenet0_', 'mobilenet0.')
        json_file = os.path.join(curr_dir, '{}-symbol.json'.format(model_name))
        base_name = '_'.join(model_name.split('_')[:-1])
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            param_file = get_model_file(base_name, tag=tag, root=root) if pretrained else None
            net = get_model('_'.join(model_name.split('_')[:-1]), prefix=sym_prefix)
            classes = getattr(net, 'classes', [])
            sym_net = SymbolBlock.imports(json_file, ['data'], None, ctx=ctx)
            if param_file:
                # weights saved by save_parameters cannot be imported into the
                # SymbolBlock directly, so we work around it by loading them here
                # and exporting once to a temporary params file
                import tempfile
                net.load_params(param_file)
                net.hybridize()
                if '512' in base_name:
                    net(mx.nd.zeros((1, 3, 512, 512)))
                elif '300' in base_name:
                    net(mx.nd.zeros((1, 3, 300, 300)))
                else:
                    net(mx.nd.zeros((1, 3, 224, 224)))
                with tempfile.TemporaryDirectory() as tmpdirname:
                    prefix = os.path.join(tmpdirname, 'tmp')
                    net.export(prefix, epoch=0)
                    param_prefix = prefix + '-0000.params'
                    sym_net.collect_params().load(param_prefix)
        sym_net.classes = classes
        sym_net.reset_class = _not_impl
        sym_net.set_nms = _not_impl
        return sym_net
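Once registered in the model zoo, the returned SymbolBlock is used like any other network, except that the detector-specific methods are stubbed out. A hypothetical usage sketch follows; the function name, input size and class list are placeholders, not part of the example.

    import mxnet as mx

    net = func(pretrained=True, ctx=mx.cpu(0))  # placeholder entry point; real names are model-zoo specific
    x = mx.nd.zeros((1, 3, 512, 512))           # size must match the exported symbol (512/300/224 above)
    output = net(x)                             # plain forward pass on the quantized SymbolBlock
    # net.reset_class(['person'])               # stubbed via _not_impl, so this is expected to fail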
Example #4
    def load_net(self):
        # Import the two exported networks (symbol JSON plus .params paths stored
        # on the instance) as SymbolBlocks, each taking a single 'data' input.
        net_cl = SymbolBlock.imports(self.net_path_cl, ['data'],
                                     self.params_path_cl)
        net_tl = SymbolBlock.imports(self.net_path_tl, ['data'],
                                     self.params_path_tl)
        return net_cl, net_tl
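A hypothetical usage sketch for the loader above; only the two SymbolBlock imports are visible in the snippet, so the instance name and the input shape below are assumptions.

    import mxnet as mx

    net_cl, net_tl = loader.load_net()  # 'loader' is an instance of the class this method belongs to
    x = mx.nd.zeros((1, 3, 224, 224))   # assumed NCHW input; the real shape depends on the exported symbols
    out_cl = net_cl(x)
    out_tl = net_tl(x)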