Example #1
0
 def test_disable_loss(self, module, frozen):
     """Disabling the loss must leave the sparsifier frozen."""
     net = sparse_model(module, frozen)
     sparsifier = net.sparsifier
     expected_frozen = True if frozen is None else frozen
     assert sparsifier.frozen is expected_frozen
     criterion = SparseLoss([net.sparsifier])
     criterion.disable()
     assert sparsifier.frozen
 def test_disable_loss(self, module, sparsify):
     """After disable(), the sparsifier must stop sparsifying."""
     net = sparse_model(module, sparsify)
     sparsifier = net.sparsifier
     expected_sparsify = False if sparsify is None else sparsify
     assert sparsifier.sparsify is expected_sparsify
     criterion = SparseLoss([net.sparsifier])
     criterion.disable()
     assert not sparsifier.sparsify
Example #3
0
    def __init__(self, target_model: NNCFNetwork,
                 sparsified_module_info: List[SparseModuleInfo], params):
        """Build the RB sparsity loss, pick a scheduler from params and
        attach the sparsifying operations to the loss."""
        super().__init__(target_model, sparsified_module_info)

        self._distributed = False
        self._loss = SparseLoss()  # type: SparseLoss
        # "exponential" is the default schedule when none is configured.
        schedule_name = params.get("schedule", "exponential")
        self._scheduler = SPARSITY_SCHEDULERS.get(schedule_name)(self, params)
        operands = [minfo.operand for minfo in self.sparsified_module_info]
        self._loss.set_layers(operands)
        self._check_sparsity_masks = params.get("check_sparsity_masks", False)
Example #4
0
 def test_get_target_sparsity_rate(self, module, target, expected_rate):
     """target_sparsity_rate yields the rate, or raises IndexError when
     expected_rate is None."""
     net = sparse_model(module, None)
     criterion = SparseLoss([net.sparsifier])
     if target is not None:
         criterion.target = target
     rate = None
     try:
         rate = criterion.target_sparsity_rate
     except IndexError:
         if expected_rate is not None:
             pytest.fail("Exception is not expected")
     else:
         if expected_rate is None:
             pytest.fail("Exception should be raised")
     if expected_rate is not None:
         assert rate == expected_rate
Example #5
0
 def __init__(self):
     """Stub controller: mocked control hooks plus a real SparseLoss."""
     from nncf.sparsity.rb.loss import SparseLoss
     self.set_sparsity_level = mocker.stub()
     self.freeze = mocker.stub()
     criterion = SparseLoss()
     criterion.current_sparsity = 0.3
     self.loss = criterion
     self.sparsity_init = 0
Example #6
0
 def __init__(self, target_model: NNCFNetwork,
              sparsified_module_info: List[SparseModuleInfo],
              params):
     """Select a per-layer or global sparsity loss depending on the
     configured sparsity_level_setting_mode; only the global mode gets
     a scheduler."""
     super().__init__(target_model, sparsified_module_info)
     self._scheduler = None
     self._distributed = False
     operands = [minfo.operand for minfo in self.sparsified_module_info]
     mode = params.get("sparsity_level_setting_mode", "global")
     if mode == 'local':
         self._loss = SparseLossForPerLayerSparsity(operands)
     else:
         self._loss = SparseLoss(operands)  # type: SparseLoss
         scheduler_cls = SPARSITY_SCHEDULERS.get(
             params.get("schedule", "exponential"))
         self._scheduler = scheduler_cls(self, params)
     self._check_sparsity_masks = params.get("check_sparsity_masks", False)
Example #7
0
 def test_calc_loss(self, module, frozen, raising):
     """Calling the loss returns 0; an AssertionError is tolerated only
     when the `raising` parameter says it is expected."""
     net = sparse_model(module, frozen)
     sparsifier = net.sparsifier
     expected_frozen = True if frozen is None else frozen
     assert sparsifier.frozen is expected_frozen
     criterion = SparseLoss([net.sparsifier])
     try:
         assert criterion() == 0
     except ZeroDivisionError:
         pytest.fail("Division by zero")
     except AssertionError:
         if not raising:
             pytest.fail("Exception is not expected")
Example #8
0
 def test_create_loss__with_defaults(self, module):
     """A freshly built SparseLoss is enabled, targets rate 0 and p=0.05."""
     net = sparse_model(module, None)
     criterion = SparseLoss([net.sparsifier])
     assert not criterion.disabled
     assert criterion.target_sparsity_rate == 0
     assert criterion.p == 0.05
Example #9
0
class RBSparsityController(BaseSparsityAlgoController):
    """Controller for regularization-based (RB) sparsity.

    Owns a SparseLoss over the sparsified modules, a sparsity scheduler
    selected from the config params, and optional consistency checking of
    sparsity masks across distributed ranks.
    """

    def __init__(self, target_model: NNCFNetwork,
                 sparsified_module_info: List[SparseModuleInfo], params):
        """Build the loss, select a scheduler and attach the sparsifying
        operations of every sparsified module to the loss."""
        super().__init__(target_model, sparsified_module_info)

        self._distributed = False
        self._loss = SparseLoss()  # type: SparseLoss
        # "exponential" is the default schedule when none is configured.
        scheduler_cls = SPARSITY_SCHEDULERS.get(
            params.get("schedule", "exponential"))
        self._scheduler = scheduler_cls(self, params)
        sparsify_operations = [m.operand for m in self.sparsified_module_info]
        self._loss.set_layers(sparsify_operations)
        self._check_sparsity_masks = params.get("check_sparsity_masks", False)

    def set_sparsity_level(self, sparsity_level):
        """Set the loss target as density: target = 1 - sparsity_level."""
        self._loss.target = 1 - sparsity_level

    def freeze(self):
        """Stop optimizing sparsity by disabling the loss term."""
        self._loss.disable()

    def distributed(self):
        """Enable distributed mode by syncing the RNG state from rank 0.

        Broadcasts rank 0's RNG state (CUDA state if the model is on GPU,
        CPU state otherwise) so that stochastic mask sampling stays in sync
        across ranks.

        Raises:
            KeyError: if the default process group is not initialized.
        """
        if not dist.is_initialized():
            raise KeyError(
                'Could not set distributed mode for the compression algorithm '
                'because the default process group has not been initialized.')

        if next(self._model.parameters()).is_cuda:
            state = torch.cuda.get_rng_state()
            # NCCL can only transfer CUDA tensors, so move the state to GPU.
            if dist.get_backend() == dist.Backend.NCCL:
                state = state.cuda()
            torch.distributed.broadcast(state, src=0)
            torch.cuda.set_rng_state(state.cpu())
        else:
            state = torch.get_rng_state()
            torch.distributed.broadcast(state, src=0)
            torch.set_rng_state(state)

        self._distributed = True

    def check_distributed_masks(self):
        """Return the fraction of mask values that agree with rank 0's mask
        (relative error below eps) across all ranks; returns 1 when not
        distributed or running with a single process."""
        if not self._distributed or get_world_size() == 1:
            return 1

        nvalues = 0
        ncor_values = 0
        eps = 1e-4
        for minfo in self.sparsified_module_info:
            mask = minfo.operand.mask

            mask_list = [
                torch.empty_like(mask) for _ in range(get_world_size())
            ]
            # nccl does not support gather, send, recv operations
            dist.all_gather(mask_list, mask)

            for i in range(1, len(mask_list)):
                # Relative error w.r.t. the rank-0 mask.
                # NOTE(review): zero entries in mask_list[0] would divide by
                # zero here — presumably masks are non-zero; confirm.
                rel_error = (mask_list[0] - mask_list[i]) / mask_list[0]
                ncor_values = ncor_values + (rel_error.abs() < eps).sum(
                    dtype=mask.dtype)
                nvalues = nvalues + mask_list[i].numel()

        return ncor_values / nvalues

    def add_algo_specific_stats(self, stats):
        """Add the target sparsity rate (and, in distributed mode with mask
        checking enabled, the mask consistency ratio) to `stats`."""
        stats["target_sparsity_rate"] = self.loss.target_sparsity_rate
        if self._distributed and self._check_sparsity_masks:
            stats["masks_consistents"] = self.check_distributed_masks()
        return stats