Example #1
def accept_pruned_input(self, model: NNCFNetwork, graph: NNCFGraph,
                        node_module):
    # Pruned input is acceptable for ordinary convolutions and for
    # depthwise convolutions; other grouped convolutions cannot
    # consume a pruned input tensor.
    accept_pruned_input = True
    if node_module.groups != 1:
        if not is_depthwise_conv(node_module):
            accept_pruned_input = False
    return accept_pruned_input
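
The check above hinges on is_depthwise_conv, which is not shown in the snippet. A minimal sketch of such a predicate, assuming the usual PyTorch convention that a depthwise convolution has one group per input channel and as many output channels as input channels (NNCF's actual helper may use different criteria):

import torch.nn as nn

def is_depthwise_conv(module: nn.Module) -> bool:
    # Depthwise under the common PyTorch convention: one group per input
    # channel and one output channel per group.
    return (isinstance(module, (nn.Conv1d, nn.Conv2d, nn.Conv3d))
            and module.groups == module.in_channels
            and module.in_channels == module.out_channels)

Under this convention each output channel of a depthwise conv depends on exactly one input channel, so a mask on its input maps one-to-one onto its output, which is why it can accept a pruned input while other grouped convolutions cannot.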
Example #2
def node_propagate_can_prune_attr(self, nncf_node: NNCFNode) -> bool:
    """
    Checks whether the node propagates the can_prune attribute, i.e. whether it can
    propagate a pruning mask (for example, activations propagate the mask, while
    convolutions stop mask propagation).
    :param nncf_node: node to work with
    :return: bool: whether this node propagates can_prune or not
    """
    node_module = self.model.get_module_by_scope(nncf_node.op_exec_context.scope_in_model)
    node_type = nncf_node.op_exec_context.operator_name
    is_conv = node_type in Convolution().get_all_op_aliases()
    return not is_conv or is_depthwise_conv(node_module)
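
The same rule can be exercised on plain PyTorch modules. A self-contained sketch of the decision (the real method resolves the module and operator type through the NNCF graph; passing the module directly is a simplification for illustration):

import torch.nn as nn

_CONVS = (nn.Conv1d, nn.Conv2d, nn.Conv3d)

def propagates_can_prune(module: nn.Module) -> bool:
    # Non-convolution ops (activations, etc.) propagate the pruning mask;
    # convolutions stop it unless they are depthwise.
    if not isinstance(module, _CONVS):
        return True
    return module.groups == module.in_channels == module.out_channels

print(propagates_can_prune(nn.ReLU()))                      # True
print(propagates_can_prune(nn.Conv2d(8, 16, 3)))            # False
print(propagates_can_prune(nn.Conv2d(8, 8, 3, groups=8)))   # True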
Example #3
    def _is_module_prunable(self, target_model: NNCFNetwork, module, module_scope: Scope):
        """
        Checks whether the module should be pruned according to the algorithm parameters.
        :param target_model: model to work with
        :param module: module to check
        :param module_scope: scope of the module
        :return: (prune: bool, msg: str)
        prune: whether the module should be pruned
        msg: additional information on why the module should/shouldn't be pruned
        """
        prune = True
        msg = None

        pruned_types = self.get_op_types_of_pruned_modules()
        input_non_pruned_modules = get_first_pruned_modules(target_model, pruned_types + ['linear'])
        output_non_pruned_modules = get_last_pruned_modules(target_model, pruned_types + ['linear'])
        module_scope_str = str(module_scope)

        if self.ignore_frozen_layers and not module.weight.requires_grad:
            msg = "Ignored adding Weight Pruner in scope: {} because"\
                    " the layer appears to be frozen (requires_grad=False)".format(module_scope_str)
            prune = False
        elif not self._should_consider_scope(module_scope_str):
            msg = "Ignored adding Weight Pruner in scope: {}".format(module_scope_str)
            prune = False
        elif not self.prune_first and module in input_non_pruned_modules:
            msg = "Ignored adding Weight Pruner in scope: {} because"\
                             " this scope is one of the first convolutions".format(module_scope_str)
            prune = False
        elif not self.prune_last and module in output_non_pruned_modules:
            msg = "Ignored adding Weight Pruner in scope: {} because"\
                             " this scope is one of the last convolutions".format(module_scope_str)
            prune = False
        elif is_grouped_conv(module):
            if not is_depthwise_conv(module):
                msg = "Ignored adding Weight Pruner in scope: {} because" \
                      " this scope is grouped convolution".format(module_scope_str)
                prune = False
        elif not self.prune_downsample_convs and is_conv_with_downsampling(module):
            msg = "Ignored adding Weight Pruner in scope: {} because"\
                             " this scope is convolution with downsample".format(module_scope_str)
            prune = False
        return prune, msg
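
Two of the helpers used above are simple predicates. Plausible sketches, assuming is_grouped_conv merely tests groups != 1 and is_conv_with_downsampling tests for a stride component greater than one (the real NNCF implementations may differ, e.g. in how transposed convolutions are handled):

import torch.nn as nn

_CONVS = (nn.Conv1d, nn.Conv2d, nn.Conv3d)

def is_grouped_conv(module: nn.Module) -> bool:
    # A convolution whose channels are split into more than one group.
    return isinstance(module, _CONVS) and module.groups != 1

def is_conv_with_downsampling(module: nn.Module) -> bool:
    # A convolution that reduces spatial resolution through its stride.
    return isinstance(module, _CONVS) and any(s > 1 for s in module.stride)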
Example #4
    def _prune_weights(self, target_model: NNCFNetwork):
        device = next(target_model.parameters()).device
        modules_to_prune = target_model.get_nncf_modules()
        insertion_commands = []
        bn_for_depthwise = {}

        input_non_pruned_modules = get_first_pruned_modules(
            target_model,
            self.get_types_of_pruned_modules() + ['linear'])
        output_non_pruned_modules = get_last_pruned_modules(
            target_model,
            self.get_types_of_pruned_modules() + ['linear'])

        for module_scope, module in modules_to_prune.items():
            # Check that we need to prune weights in this op
            if not self._is_pruned_module(module):
                continue

            module_scope_str = str(module_scope)
            if self.ignore_frozen_layers and not module.weight.requires_grad:
                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {} because"
                    " the layer appears to be frozen (requires_grad=False)".
                    format(module_scope_str))
                continue

            if not self._should_consider_scope(module_scope_str):
                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {}".format(
                        module_scope_str))
                continue

            if not self.prune_first and module in input_non_pruned_modules:
                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {} because"
                    " this scope is one of the first convolutions".format(
                        module_scope_str))
                continue
            if not self.prune_last and module in output_non_pruned_modules:
                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {} because"
                    " this scope is one of the last convolutions".format(
                        module_scope_str))
                continue

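            # Grouped convolutions are skipped below; for a depthwise conv,
            # its batch norm is remembered under the preceding conv's scope
            # so that it can be pruned together with that conv afterwards.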
            if is_grouped_conv(module):
                if is_depthwise_conv(module):
                    previous_conv = get_previous_conv(target_model, module,
                                                      module_scope)
                    if previous_conv:
                        depthwise_bn = get_bn_for_module_scope(
                            target_model, module_scope)
                        bn_for_depthwise[str(previous_conv.op_exec_context.
                                             scope_in_model)] = depthwise_bn

                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {} because"
                    " this scope is grouped convolution".format(
                        module_scope_str))
                continue

            if not self.prune_downsample_convs and is_conv_with_downsampling(
                    module):
                nncf_logger.info(
                    "Ignored adding Weight Pruner in scope: {} because"
                    " this scope is convolution with downsample".format(
                        module_scope_str))
                continue

            nncf_logger.info(
                "Adding Weight Pruner in scope: {}".format(module_scope_str))
            operation = self.create_weight_pruning_operation(module)
            hook = UpdateWeight(operation).to(device)
            insertion_commands.append(
                InsertionCommand(
                    InsertionPoint(
                        InputAgnosticOperationExecutionContext(
                            "", module_scope, 0),
                        InsertionType.NNCF_MODULE_PRE_OP), hook,
                    OperationPriority.PRUNING_PRIORITY))

            related_modules = {}
            if self.prune_batch_norms:
                related_modules[
                    PrunedModuleInfo.BN_MODULE_NAME] = get_bn_for_module_scope(
                        target_model, module_scope)

            self._pruned_module_info.append(
                PrunedModuleInfo(module_scope_str, module, hook.operand,
                                 related_modules))

        if self.prune_batch_norms:
            self.update_minfo_with_depthwise_bn(bn_for_depthwise)

        return insertion_commands
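
UpdateWeight wraps the pruning operation in a pre-op hook that rewrites the module's weight before each forward pass. A minimal sketch of that pattern with a plain forward pre-hook and a binary mask standing in for create_weight_pruning_operation (illustrative only, not NNCF's actual UpdateWeight/InsertionCommand machinery):

import torch
import torch.nn as nn

class MaskingPreOp(nn.Module):
    """Illustrative stand-in for UpdateWeight: zeroes out pruned filters
    in the wrapped module's weight before every forward call."""

    def __init__(self, module: nn.Module):
        super().__init__()
        self.register_buffer('mask', torch.ones_like(module.weight))

    def _mask_weight(self, module, inputs):
        # Idempotent for a binary mask, so reapplying it each call is safe.
        module.weight.data.mul_(self.mask)

    def apply_to(self, module: nn.Module) -> None:
        module.register_forward_pre_hook(self._mask_weight)

conv = nn.Conv2d(4, 8, kernel_size=3, bias=False)
op = MaskingPreOp(conv)
op.mask[0] = 0.0                          # "prune" the first output filter
op.apply_to(conv)
out = conv(torch.randn(1, 4, 16, 16))
assert out[:, 0].abs().sum() == 0         # channel 0 is fully masked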
Example #5
    def _create_pruning_groups(self, target_model: NNCFNetwork):
        """
        Groups ALL modules of prunable types into groups that should be pruned together:
        1. Create clusters for special ops (eltwises) that should be pruned together
        2. Create groups of nodes that should be pruned together (taking the clusters of special ops into account)
        3. Add the remaining single nodes as separate clusters
        4. Merge clusters for Conv + Depthwise conv pairs (they should be pruned together too)
        5. Check every group for consistency (either all nodes in a group can be pruned, or none of them are)
        Returns groups of modules that should be pruned together.
        :param target_model: model to work with
        :return: clusterization of pruned nodes
        """
        graph = target_model.get_original_graph()
        pruned_types = self.get_op_types_of_pruned_modules()
        all_modules_to_prune = target_model.get_nncf_modules_by_module_names(self.compressed_nncf_module_names)
        all_nodes_to_prune = graph.get_nodes_by_types(pruned_types)  # NNCFNodes here
        assert len(all_nodes_to_prune) <= len(all_modules_to_prune)

        # 1. Clusters for special ops
        special_ops_types = self.get_types_of_grouping_ops()
        identity_like_types = IdentityMaskForwardOps.get_all_op_aliases()
        special_ops_clusterization = cluster_special_ops(target_model, special_ops_types,
                                                         identity_like_types)

        pruned_nodes_clusterization = Clusterization("id")

        # 2. Clusters for nodes that should be pruned together (taking into account clusters for special ops)
        for i, cluster in enumerate(special_ops_clusterization.get_all_clusters()):
            all_pruned_inputs = []
            pruned_inputs_idxs = set()

            for node in cluster.nodes:
                sources = get_sources_of_node(node, graph, pruned_types)
                for source_node in sources:
                    source_scope = source_node.op_exec_context.scope_in_model
                    source_module = target_model.get_module_by_scope(source_scope)
                    source_node_info = NodeInfo(source_node, source_module, source_scope)

                    if source_node.node_id not in pruned_inputs_idxs:
                        all_pruned_inputs.append(source_node_info)
                        pruned_inputs_idxs.add(source_node.node_id)
            if all_pruned_inputs:
                cluster = NodesCluster(i, list(all_pruned_inputs), [n.id for n in all_pruned_inputs])
                pruned_nodes_clusterization.add_cluster(cluster)

        last_cluster_idx = len(special_ops_clusterization.get_all_clusters())

        # 3. Add remaining single nodes as separate clusters
        for node in all_nodes_to_prune:
            if not pruned_nodes_clusterization.is_node_in_clusterization(node.node_id):
                scope = node.op_exec_context.scope_in_model
                module = target_model.get_module_by_scope(scope)
                node_info = NodeInfo(node, module, scope)

                cluster = NodesCluster(last_cluster_idx, [node_info], [node.node_id])
                pruned_nodes_clusterization.add_cluster(cluster)

                last_cluster_idx += 1

        # 4. Merge clusters for Conv + Depthwise conv (should be pruned together too)
        for node in all_nodes_to_prune:
            scope = node.op_exec_context.scope_in_model
            module = target_model.get_module_by_scope(scope)
            cluster_id = pruned_nodes_clusterization.get_cluster_by_node_id(node.node_id).id

            if is_depthwise_conv(module):
                previous_conv = get_previous_conv(target_model, module, scope)
                if previous_conv:
                    previous_conv_cluster_id = pruned_nodes_clusterization.get_cluster_by_node_id(
                        previous_conv.node_id).id
                    pruned_nodes_clusterization.merge_clusters(cluster_id, previous_conv_cluster_id)

        # 5. Check every group for consistency (either all nodes in a group can be pruned, or none of them are).
        model_analyser = ModelAnalyzer(target_model)
        can_prune_analysis = model_analyser.analyse_model_before_pruning()
        self._check_pruning_groups(target_model, pruned_nodes_clusterization, can_prune_analysis)
        return pruned_nodes_clusterization
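
Step 4 relies on merge_clusters to union two groups so that a depthwise conv is always pruned together with its preceding conv. A toy sketch of that bookkeeping (simplified names and storage; the real Clusterization tracks NodesCluster objects rather than bare id sets):

class ToyClusterization:
    def __init__(self):
        self._clusters = {}    # cluster id -> set of node ids

    def add_cluster(self, cluster_id, node_ids):
        self._clusters[cluster_id] = set(node_ids)

    def get_cluster_id_by_node(self, node_id):
        return next(cid for cid, nodes in self._clusters.items()
                    if node_id in nodes)

    def merge_clusters(self, first_id, second_id):
        # Move every node of the second cluster into the first and drop it,
        # so both convolutions end up in a single group pruned together.
        if first_id != second_id:
            self._clusters[first_id] |= self._clusters.pop(second_id)

clusters = ToyClusterization()
clusters.add_cluster(0, [1])   # a regular conv
clusters.add_cluster(1, [2])   # the depthwise conv that follows it
clusters.merge_clusters(clusters.get_cluster_id_by_node(1),
                        clusters.get_cluster_id_by_node(2))
assert clusters.get_cluster_id_by_node(2) == 0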