Example #1
    def test_prune_module(self):
        conv = MaskConv2d(32, 32, 3)
        G = 4
        prune_utils.prune_module(conv, G)

        # after grouped pruning, some entries of the mask should be zeroed,
        # so the mask can no longer be all ones
        self.assertFalse(torch.allclose(conv.mask,
                                        torch.ones(conv.mask.shape)))
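As a rough mental model only (the MaskConv2d implementation used by these snippets is not shown here), a masked convolution of this kind might look like the sketch below; the class name MaskedConv2d and all details are assumptions, not the repository's code.

import torch
import torch.nn as nn
import torch.nn.functional as F

class MaskedConv2d(nn.Conv2d):
    """Illustrative masked convolution (hypothetical): the mask buffer is
    multiplied into the weight on every forward pass."""

    def __init__(self, in_channels, out_channels, kernel_size, **kwargs):
        super().__init__(in_channels, out_channels, kernel_size, **kwargs)
        # all-ones mask with the same shape as the weight; pruning would
        # zero out entries of this buffer, which is what the test checks
        self.register_buffer("mask", torch.ones_like(self.weight))

    def forward(self, x):
        return F.conv2d(x, self.weight * self.mask, self.bias,
                        self.stride, self.padding, self.dilation,
                        self.groups)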
Example #2
    def prune_module(self,
                     name,
                     mod,
                     *args,
                     g_cfg=None,
                     fake_mask=False,
                     **kwargs):
        """ Prune a single module.

      We expect that after pruning, the mask in mod can be
      updated to a pruned result.
    
    Args:
      name(str): name of the module
      mod(MaskConv2d): the module to be pruned.
    """
        # TODO maybe not pass by kwargs
        G = self.args.num_groups
        if g_cfg is not None and name in g_cfg:
            G = g_cfg[name]["G"]
            # sanity check: the recorded F/C must match the actual weight shape
            W = model_utils.get_weight_parameter(mod)
            F, C = W.shape[:2]
            assert F == g_cfg[name]["F"] and C == g_cfg[name]["C"]

        if fake_mask and isinstance(mod, MaskConv2d):
            mod.fake_mask = True

        prune_utils.prune_module(mod, G=G, MCPG=self.args.mcpg, **kwargs)
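Based on the lookups above (g_cfg[name]["G"], g_cfg[name]["F"], g_cfg[name]["C"]), a g_cfg entry appears to map a module name to its group count and the first two dimensions of its weight. A hypothetical example (module names invented for illustration):

# hypothetical per-module group configuration; F and C mirror the
# output-filter and input-channel counts used for the shape check above
g_cfg = {
    "layer1.0.conv1": {"G": 4, "F": 64, "C": 64},
    "layer1.0.conv2": {"G": 2, "F": 64, "C": 64},
}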
Example #3
    def prune_module(self, name, mod, G, **kwargs):
        """ Prune a specific module.
            NOTE: G is known at this moment.
        """
        assert isinstance(mod, MaskConv2d)
        assert G >= 1, "{} has G={} smaller than 1".format(name, G)

        W = model_utils.get_weight_parameter(mod)
        C_out, C_in = W.shape[:2]

        if G == 1 or (C_out % G != 0 or C_in % G != 0):
            # NOTE: skip modules that cannot be pruned, i.e. G == 1 or
            # channel counts not divisible by G
            return

        prune_utils.prune_module(mod, G=G, **kwargs)
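prune_utils.prune_module itself is not shown in these excerpts, so the sketch below is only an illustration of why the divisibility check above matters: one simple group-wise scheme builds a block-diagonal mask over output filters and input channels, which requires both C_out and C_in to be divisible by G. The function name and the criterion are assumptions; the repository's actual logic (including the MCPG argument) may differ.

import torch

def block_diagonal_group_mask(C_out, C_in, G):
    """Build a (C_out, C_in) mask where filters in group g only keep
    connections to input channels of group g (hypothetical sketch)."""
    assert C_out % G == 0 and C_in % G == 0
    mask = torch.zeros(C_out, C_in)
    f_per_g, c_per_g = C_out // G, C_in // G
    for g in range(G):
        mask[g * f_per_g:(g + 1) * f_per_g,
             g * c_per_g:(g + 1) * c_per_g] = 1.0
    return mask

# Broadcast over the kernel dimensions of a conv mask of shape
# (C_out, C_in, kH, kW), e.g.:
#   conv.mask.copy_(block_diagonal_group_mask(64, 64, 4)[:, :, None, None]
#                   .expand_as(conv.mask))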