Example #1
def export_model(self, filename, *args, **kwargs):
    """
    Exports the compressed model into the ONNX format for inference.
    Makes method-specific preparations of the model graph
    (e.g. removes auxiliary layers that were used for model compression),
    then exports the model and dumps it into the output file.
    Parameters:
        `filename` - a path to the file for the exported model to be saved into.
        *args, **kwargs - if the model's `forward` requires additional parameters
        during export, specify these here.
    """
    model = self._model.eval().cpu()
    # Build single-batch mock inputs matching the model's registered input shapes.
    input_tensor_list = []
    for info in self._model.input_infos:
        single_batch_info = copy(info)
        input_shape = tuple([1] + list(info.shape)[1:])
        single_batch_info.shape = input_shape
        input_tensor_list.append(create_mock_tensor(single_batch_info, "cpu"))
    # Temporarily pre-bind the extra forward arguments for the duration of the export.
    original_forward = model.forward
    model.forward = partial(model.forward, *args, **kwargs)
    # pylint:disable=unexpected-keyword-arg
    with torch.no_grad():
        torch.onnx.export(model, tuple(input_tensor_list),
                          filename, verbose=True, enable_onnx_checker=False, opset_version=10)
    model.forward = original_forward
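A minimal usage sketch (the controller object, file name and the extra `ctx` keyword are illustrative, not part of NNCF's API):

compression_ctrl.export_model("compressed_model.onnx", ctx=extra_tensor)

The extra arguments are pre-bound to `forward` via `functools.partial`, so `torch.onnx.export` only has to supply the single-batch mock tensors positionally.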
Example #2
def sr_dummy_forward_fn(model_, input_sample_sizes: Tuple[List[int]]):
    device = next(model_.parameters()).device
    config = {
        'input_info': [{
            "sample_size": sizes
        } for sizes in input_sample_sizes]
    }
    input_info_list = create_input_infos(config)
    tensor_list = [
        create_mock_tensor(info, device) for info in input_info_list
    ]
    # Mark each mock tensor as an NNCF model input so the input nodes get traced.
    for idx, tensor in enumerate(tensor_list):
        tensor_list[idx] = nncf_model_input(tensor)
    return model_(tuple(tensor_list))
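A dummy forward function like this is what gets handed to NNCF when building the compressed model; a minimal sketch, assuming the `dummy_forward_fn` keyword of NNCF's `create_compressed_model` (the model, config and sample sizes are illustrative):

from functools import partial
from nncf import create_compressed_model

compression_ctrl, compressed_model = create_compressed_model(
    sr_model, nncf_config,
    dummy_forward_fn=partial(sr_dummy_forward_fn,
                             input_sample_sizes=([1, 3, 32, 32],)))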
Example #3
def sr_dummy_forward_fn(model_, input_sample_sizes: Tuple[List[int]]):
    device = next(model_.parameters()).device
    config = {
        'input_info': [{
            "sample_size": sizes
        } for sizes in input_sample_sizes]
    }
    input_info_list = create_input_infos(config)
    tensor_list = [
        create_mock_tensor(info, device) for info in input_info_list
    ]
    # Package the tensors as a single positional argument and wrap them the same
    # way NNCF will wrap the real inputs on compressed-model calls.
    args = (tuple(tensor_list), )
    args, _ = sr_wrap_inputs_fn(args, {})
    return model_(*args)
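`sr_wrap_inputs_fn` itself is not shown in these snippets; a hypothetical sketch, consistent only with how it is called above (one positional argument that is a tuple of tensors, each of which must pass through `nncf_model_input`):

def sr_wrap_inputs_fn(model_args, model_kwargs):
    # model_args is expected to be ((tensor_0, tensor_1, ...),)
    wrapped = tuple(nncf_model_input(t) for t in model_args[0])
    return (wrapped, ), model_kwargs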
Example #4
    def wrap_inputs(self, model_args, model_kwargs):
        bound_model_params = self._fwd_signature.bind(*model_args,
                                                      **model_kwargs)
        for param_name in self._fwd_params_to_input_infos_odict:
            param_kind = self._fwd_signature.parameters[param_name].kind
            if param_kind is Parameter.VAR_POSITIONAL or param_kind is Parameter.VAR_KEYWORD:
                nncf_logger.warning(
                    "An input_info tensor was bound to a *args or **kwargs variadic parameter in the "
                    "forward's signature! This is currently unsupported by NNCF. Input compression may "
                    "be incorrect.")
                # Currently won't support input info mapping to *args or **kwargs-mapped parameters
                continue

            if param_name not in bound_model_params.arguments:
                nncf_logger.warning(
                    "A call to a compressed model's forward occurred without one of the params "
                    "specified in input_infos! Input compression may be incorrect. Trying to recover "
                    "by wrapping the default value for the parameter.")
                bound_model_params.apply_defaults()

            potential_tensor = bound_model_params.arguments[param_name]
            if potential_tensor is not None:
                bound_model_params.arguments[param_name] = nncf_model_input(
                    bound_model_params.arguments[param_name])
            else:
                # The default was None - cannot wrap it as-is. Wrap a dummy tensor built from the
                # input infos instead - this preserves the call order of nncf_model_input nodes,
                # and the post-hooks for the input node will still execute. The result won't go
                # anywhere, though.
                nncf_logger.warning(
                    "Wrapping a dummy tensor for input {}".format(param_name))
                info_for_missing_input = self._fwd_params_to_input_infos_odict[
                    param_name]
                device = 'cuda'  # fallback when there is no module to query for the device
                if self._module_ref_for_device is not None:
                    device = next(
                        self._module_ref_for_device.parameters()).device
                dummy_tensor = create_mock_tensor(info_for_missing_input,
                                                  device)
                _ = nncf_model_input(dummy_tensor)

        return bound_model_params.args, bound_model_params.kwargs
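The binding logic above uses only the standard library; a self-contained sketch of the same `inspect` mechanics (the `forward`-like signature is illustrative):

from inspect import signature, Parameter

def forward(x, mask=None, *extra):
    pass

sig = signature(forward)
bound = sig.bind(1)                # the caller did not pass 'mask'
print('mask' in bound.arguments)   # False until defaults are applied
bound.apply_defaults()
print(bound.arguments['mask'])     # None - now present and wrappable
print(sig.parameters['extra'].kind is Parameter.VAR_POSITIONAL)  # True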
Example #5
def default_dummy_forward_fn(model):
    device = next(model.parameters()).device
    # `input_infos` is a free variable captured from the enclosing scope.
    tensor_list = [
        create_mock_tensor(info, device) for info in input_infos
    ]
    return model(*tuple(tensor_list))
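Since `input_infos` is captured from an enclosing scope, this function only works as a closure; a plausible enclosing factory, reconstructed from the snippet alone (the name matches NNCF's `create_dummy_forward_fn` helper, but the body here is a sketch):

def create_dummy_forward_fn(input_infos):
    def default_dummy_forward_fn(model):
        device = next(model.parameters()).device
        tensor_list = [
            create_mock_tensor(info, device) for info in input_infos
        ]
        return model(*tuple(tensor_list))
    return default_dummy_forward_fn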