Example #1
    def __init__(self, target_model: NNCFNetwork,
                 sparsified_module_info: List[SparseModuleInfo],
                 config: NNCFConfig):
        super().__init__(target_model, sparsified_module_info)
        self._config = config
        self._algo_config = extract_algo_specific_config(
            self._config, 'magnitude_sparsity')
        params = self._algo_config.get('params', {})

        self._weight_importance_fn = WEIGHT_IMPORTANCE_FUNCTIONS[params.get(
            'weight_importance', 'normed_abs')]
        self._mode = params.get('sparsity_level_setting_mode', 'global')
        self._scheduler = None
        sparsity_init = self._algo_config.get('sparsity_init', 0)

        if self._mode == 'global':
            scheduler_params = deepcopy(params)
            scheduler_params['sparsity_init'] = sparsity_init
            scheduler_cls = SPARSITY_SCHEDULERS.get(
                params.get('schedule', 'polynomial'))
            self._scheduler = scheduler_cls(self, scheduler_params)
        else:
            self._scheduler = StubCompressionScheduler()

        self._bn_adaptation = None

        self.set_sparsity_level(sparsity_init)
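
For reference, a minimal sketch of the config section this controller consumes. The key names mirror the .get(...) calls above; the concrete values and the "input_info" entry are illustrative assumptions, not a verified NNCF schema sample:

    # Hypothetical config for the controller above; keys mirror the
    # .get(...) calls, values are illustrative only.
    from nncf import NNCFConfig

    nncf_config = NNCFConfig.from_dict({
        "input_info": {"sample_size": [1, 3, 224, 224]},  # illustrative shape
        "compression": {
            "algorithm": "magnitude_sparsity",
            "sparsity_init": 0.05,  # algo_config.get('sparsity_init', 0)
            "params": {
                "weight_importance": "normed_abs",        # WEIGHT_IMPORTANCE_FUNCTIONS key
                "sparsity_level_setting_mode": "global",  # 'global' builds a real scheduler
                "schedule": "polynomial"                  # SPARSITY_SCHEDULERS key
            }
        }
    })
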
Example #2
    def __init__(self, target_model, config: NNCFConfig, op_names):
        super().__init__(target_model, op_names)
        algo_config = extract_algo_specific_config(config,
                                                   'magnitude_sparsity')
        params = deepcopy(algo_config.get('params', {}))
        self._threshold = 0
        self._frozen = False
        self._weight_importance_fn = WEIGHT_IMPORTANCE_FUNCTIONS[params.get(
            'weight_importance', 'normed_abs')]

        sparsity_init = algo_config.get('sparsity_init', 0)
        params['sparsity_init'] = sparsity_init
        scheduler_type = params.get('schedule', 'polynomial')

        if scheduler_type == 'adaptive':
            raise ValueError(
                'The magnitude sparsity algorithm does not support the adaptive scheduler'
            )

        scheduler_cls = SPARSITY_SCHEDULERS.get(scheduler_type)
        self._scheduler = scheduler_cls(self, params)
        self._loss = TFZeroCompressionLoss()
        self._bn_adaptation = None
        self._config = config
        self.set_sparsity_level(sparsity_init)
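
Both magnitude-sparsity examples resolve their scheduler class by name through SPARSITY_SCHEDULERS. A minimal sketch of that name-to-class registry pattern, using a simplified stand-in for the actual NNCF Registry (nncf.common.utils.registry), which differs in detail:

    # Simplified stand-in for the registry behind SPARSITY_SCHEDULERS.get(...).
    class Registry:
        def __init__(self, name: str):
            self._name = name
            self._registry = {}

        def register(self, name: str):
            # Decorator that stores a class under the given name.
            def decorator(cls):
                self._registry[name] = cls
                return cls
            return decorator

        def get(self, name: str):
            if name not in self._registry:
                raise KeyError(f'{name} is not registered in {self._name}')
            return self._registry[name]

    SPARSITY_SCHEDULERS = Registry('sparsity schedulers')

    @SPARSITY_SCHEDULERS.register('polynomial')
    class PolynomialSparsityScheduler:
        def __init__(self, controller, params: dict):
            self.initial_level = params.get('sparsity_init', 0)
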
Example #3
    def __init__(self, target_model, config: NNCFConfig, op_names: List[str]):
        super().__init__(target_model, op_names)
        algo_config = extract_algo_specific_config(config, "rb_sparsity")
        sparsity_init = algo_config.get('sparsity_init', 0)
        params = deepcopy(algo_config.get('params', {}))
        params['sparsity_init'] = sparsity_init
        sparsity_level_mode = params.get('sparsity_level_setting_mode', 'global')

        if sparsity_level_mode == 'local':
            raise NotImplementedError('The RB sparsity algorithm does not support local sparsity loss')

        target_ops = []
        for wrapped_layer, _, op in get_nncf_operations(self.model, self._op_names):
            target_ops.append(
                (op, wrapped_layer.get_operation_weights(op.name))
            )

        self._loss = SparseLoss(target_ops)
        schedule_type = params.get('schedule', 'exponential')

        if schedule_type == 'adaptive':
            raise NotImplementedError('The RB sparsity algorithm does not support the adaptive scheduler')

        scheduler_cls = SPARSITY_SCHEDULERS.get(schedule_type)
        self._scheduler = scheduler_cls(self, params)
        self.set_sparsity_level(sparsity_init)
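
The matching config fragment for the RB-sparsity examples (this one and Example #8 below) would look roughly as follows; the key names come from the code, the values are illustrative, and note the default schedule here is 'exponential' rather than 'polynomial':

    # Hypothetical rb_sparsity section; 'local' mode would raise
    # NotImplementedError in the controller above.
    rb_sparsity_section = {
        "algorithm": "rb_sparsity",
        "sparsity_init": 0.02,
        "params": {
            "sparsity_level_setting_mode": "global",
            "schedule": "exponential"  # default of params.get('schedule', ...)
        }
    }
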
Example #4
    def __init__(self, target_model: NNCFNetwork, config: NNCFConfig):
        super().__init__(target_model)

        self._loss = ZeroCompressionLoss(
            next(target_model.parameters()).device)
        scheduler_cls = QUANTIZATION_SCHEDULERS.get("staged")
        algo_config = extract_algo_specific_config(config, "binarization")
        self._scheduler = scheduler_cls(self, algo_config.get("params", {}))
        from nncf.torch.utils import is_main_process
        if is_main_process():
            self._compute_and_display_flops_binarization_rate()
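
ZeroCompressionLoss is constructed with the model's device and acts as a no-op stand-in so the training loop can always add a compression loss term. A minimal sketch of such a loss, assuming a torch.nn.Module-style interface (the real nncf.torch class may differ):

    # Sketch of a no-op compression loss pinned to a device; hypothetical
    # stand-in, not the actual nncf.torch implementation.
    import torch
    from torch import nn

    class ZeroCompressionLossSketch(nn.Module):
        def __init__(self, device: torch.device):
            super().__init__()
            self._device = device

        def forward(self) -> torch.Tensor:
            # Constant zero: adding it to the task loss changes nothing.
            return torch.zeros([], device=self._device)
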
Example #5
    def __init__(self,
                 target_model: tf.keras.Model,
                 op_names: List[str],
                 prunable_types: List[str],
                 pruned_layer_groups_info: Clusterization[PrunedLayerInfo],
                 config):
        super().__init__(target_model)
        self._op_names = op_names
        self._prunable_types = prunable_types
        self.config = config
        self.pruning_config = extract_algo_specific_config(
            config, "filter_pruning")
        params = self.pruning_config.get('params', {})
        self.pruning_init = self.pruning_config.get('pruning_init', 0)
        self.pruning_level = self.pruning_init
        self._pruned_layer_groups_info = pruned_layer_groups_info
        self.prune_flops = False
        self._check_pruning_level(params)
Example #6
    def __init__(self, target_model: NNCFNetwork, prunable_types: List[str],
                 pruned_module_groups_info: Clusterization[PrunedModuleInfo],
                 config: NNCFConfig):
        super().__init__(target_model)
        self._loss = ZeroCompressionLoss(
            next(target_model.parameters()).device)
        self._prunable_types = prunable_types
        self.config = config
        self.pruning_config = extract_algo_specific_config(
            config, 'filter_pruning')
        params = self.pruning_config.get('params', {})
        self.pruned_module_groups_info = pruned_module_groups_info
        self.prune_batch_norms = params.get('prune_batch_norms', True)
        self.prune_first = params.get('prune_first_conv', False)
        self.prune_downsample_convs = params.get(
            'prune_downsample_convs', False)
        self.prune_flops = False
        self.check_pruning_level(params)
        self._hooks = []
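
Examples #5 and #6 read the same 'filter_pruning' section. A fragment that exercises every key accessed above (values are illustrative; the defaults shown are the ones supplied by the .get(...) calls):

    # filter_pruning section; keys mirror the reads in Examples #5 and #6.
    filter_pruning_section = {
        "algorithm": "filter_pruning",
        "pruning_init": 0.1,  # default 0
        "params": {
            "prune_batch_norms": True,        # default True
            "prune_first_conv": False,        # default False
            "prune_downsample_convs": False   # default False
        }
    }
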
Example #7
    def get_redefinable_global_param_value_for_algo(
            self, param_name: str, algo_name: str) -> Optional:
        """
        Some parameters can be specified both on the global NNCF config .json level (so that they apply
        to all algos), and at the same time overridden in the algorithm-specific section of the .json.
        This function returns the value that should apply for a given algorithm name, considering the
        exact format of this config.

        :param param_name: The name of a parameter in the .json specification of the NNCFConfig, that may
          be present either at the top-most level of the .json, or at the top level of the algorithm-specific
          subdict.
        :param algo_name: The name of the algorithm (among the allowed algorithm names in the .json) for which
          the resolution of the redefinable parameter should occur.
        :return: The value of the parameter that should be applied for the algo specified by `algo_name`.
        """
        from nncf.config.extractors import extract_algo_specific_config
        algo_config = extract_algo_specific_config(self, algo_name)
        param = self.get(param_name)
        algo_specific_param = algo_config.get(param_name)
        if algo_specific_param is not None:
            param = algo_specific_param
        return param
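
A worked example of this resolution, assuming the method lives on NNCFConfig as the use of self suggests ('ignored_scopes' and the scope strings are illustrative; any top-level key repeated inside an algo section behaves the same way):

    # The global value is overridden by the algo-specific one.
    config = NNCFConfig.from_dict({
        "input_info": {"sample_size": [1, 3, 32, 32]},
        "ignored_scopes": ["LeNet/relu_0"],      # global default
        "compression": {
            "algorithm": "rb_sparsity",
            "ignored_scopes": ["LeNet/relu_1"]   # algo-specific override
        }
    })
    value = config.get_redefinable_global_param_value_for_algo(
        "ignored_scopes", "rb_sparsity")
    # value == ["LeNet/relu_1"]
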
Example #8
    def __init__(self, target_model: NNCFNetwork,
                 sparsified_module_info: List[SparseModuleInfo],
                 config: NNCFConfig):
        super().__init__(target_model, sparsified_module_info)
        algo_config = extract_algo_specific_config(config, 'rb_sparsity')
        params = deepcopy(algo_config.get('params', {}))

        self._distributed = False
        self._mode = params.get('sparsity_level_setting_mode', 'global')
        self._check_sparsity_masks = params.get('check_sparsity_masks', False)

        sparsify_operations = [m.operand for m in self.sparsified_module_info]
        if self._mode == 'local':
            self._loss = SparseLossForPerLayerSparsity(sparsify_operations)
            self._scheduler = StubCompressionScheduler()
        else:
            self._loss = SparseLoss(sparsify_operations)

            sparsity_init = algo_config.get('sparsity_init', 0)
            params['sparsity_init'] = sparsity_init
            scheduler_cls = SPARSITY_SCHEDULERS.get(
                params.get('schedule', 'exponential'))
            self._scheduler = scheduler_cls(self, params)
            self.set_sparsity_level(sparsity_init)
Example #9
    def _get_algo_specific_config_section(self) -> Dict:
        return extract_algo_specific_config(self.config, self.name)
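
Finally, every example above funnels through extract_algo_specific_config. A simplified sketch of what it has to do, assuming the usual NNCF layout where "compression" holds either a single algorithm dict or a list of them (the real function in nncf.config.extractors adds validation and better error reporting):

    # Simplified sketch of extract_algo_specific_config; hypothetical
    # reimplementation for illustration only.
    def extract_algo_specific_config_sketch(config, algo_name_to_match: str):
        compression_section = config.get('compression', {})
        if isinstance(compression_section, dict):
            candidates = [compression_section]
        else:  # a list of per-algorithm sections
            candidates = list(compression_section)

        matches = [c for c in candidates
                   if c.get('algorithm') == algo_name_to_match]
        if len(matches) != 1:
            raise RuntimeError(f'Expected exactly one "{algo_name_to_match}" '
                               f'config section, found {len(matches)}')
        return matches[0]
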