Example #1
    def sub_to_add_replacement(sub: Node):
        # this transformation is executed for the V10 IR later, in the middle phase, regardless of graph_condition,
        # so we prevent replacing Sub on shape-calculating sub-graphs
        if sub.in_port(0).data.get_value() is not None and sub.in_port(
                1).data.get_value() is not None:
            return

        graph = sub.graph
        name = sub.soft_get('name', sub.id)

        # keep Add name the same as Sub -- because of mathematical equality of output tensors
        rename_node(node=sub, name=name + '/to_be_removed')

        # reconnect Sub in(out)puts to Add
        add = Add(graph, {'name': name}).create_node()
        rename_node(add, name)

        sub.in_port(0).get_connection().set_destination(add.in_port(0))
        sub.in_port(1).get_connection().set_destination(add.in_port(1))
        sub.out_port(0).get_connection().set_source(add.out_port(0))

        # restore mathematical equivalence to Sub operation: Sub(A, B) = Add(A, Mul(B, -1))
        const_dtype = sub.soft_get('data_type', np.float32)
        negate = create_op_with_const_inputs(
            graph, Mul, {1: np.array(-1, dtype=const_dtype)},
            {'name': name + '/neg_'})
        add.in_port(1).get_connection().insert_node(negate)
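A quick numerical check of the equivalence used above, written with plain NumPy (the array values are made up for illustration):

import numpy as np

a = np.array([3.0, -1.5, 2.0], dtype=np.float32)
b = np.array([1.0, 0.5, -4.0], dtype=np.float32)
# Sub(A, B) == Add(A, Mul(B, -1))
assert np.allclose(a - b, a + b * np.float32(-1))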
Example #2
 def undo_renaming(graph, fq_node):
     if 'orig_fq_name' in fq_node:
         node = ge.get_node_by_name(graph,
                                    '{fq_name}/pre_fq_input'.format(fq_name=fq_node.fullname),
                                    recursively=False)
         rename_node(node, node['orig_node_name'])
         rename_node(fq_node, fq_node['orig_fq_name'])
Example #3
    def replace_op(self, graph: Graph, node: Node):
        # save the original node name to use it in the new Pad op instance
        original_name = node.soft_get('name', node.id)
        rename_node(node, original_name + '/TBR')

        new_pad = Pad(graph, {
            'mode': node.soft_get('mode', None)
        }).create_node()
        rename_node(new_pad, original_name)

        node.in_port(0).get_connection().set_destination(new_pad.in_port(0))

        if node.soft_get('mode') == 'constant':
            # the input with fill value is an optional third input in ONNX
            if not node.in_port(2).disconnected():
                node.in_port(2).get_connection().set_destination(
                    new_pad.in_port(3))
            else:
                new_pad.in_port(3).connect(
                    Const(graph, {
                        'value': 0.0
                    }).create_node().out_port(0))

        # convert ONNX representation of the pads as [2 * N] to MO representation: [N] and [N]
        split_pads = create_op_with_const_inputs(graph, Split,
                                                 {1: int64_array(0)},
                                                 {'num_splits': 2})
        node.in_port(1).get_connection().set_destination(split_pads.in_port(0))
        split_pads.out_port(0).connect(new_pad.in_port(1))
        split_pads.out_port(1).connect(new_pad.in_port(2))

        return [new_pad.id]
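The Split above relies on the ONNX pads layout: one vector of length 2 * N holding all the "begin" values followed by all the "end" values. A minimal NumPy sketch of that conversion (the pad values are illustrative):

import numpy as np

onnx_pads = np.array([1, 2, 0, 3, 4, 0], dtype=np.int64)  # input of rank N = 3
pads_begin, pads_end = np.split(onnx_pads, 2, axis=0)
assert np.array_equal(pads_begin, [1, 2, 0])
assert np.array_equal(pads_end, [3, 4, 0])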
Example #4
def make_node_names_unique(nodes: list, node_names: set):
    """
    :param nodes: List with nodes matching a specific name
    :param node_names: Set with all node names contained in the graph
    :return: None

    Result nodes are renamed only when it is absolutely necessary (i.e. when several Result nodes share the same name).
    The function finds the positions of Result nodes in the "nodes" list, keeps the first one and renames all other nodes.
    If the "nodes" list does not contain Result nodes, then all nodes starting from the second one will be renamed.
    All new names are added to the "node_names" set.
    """
    results_pos = [
        idx for idx, node in enumerate(nodes) if node.op == 'Result'
    ]
    node_position_to_keep = 0
    if len(results_pos) != 0:
        node_position_to_keep = results_pos[0]
    for idx, node in enumerate(nodes):
        if idx != node_position_to_keep:
            new_node_name = node.soft_get('name', node.id) + '_' + str(idx)
            # preparing a new unique name for the node
            while new_node_name in node_names:
                new_node_name += '_' + str(idx)
            node_names.add(new_node_name)
            rename_node(node, new_node_name)
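The renaming loop can be illustrated without any graph machinery; a plain-Python sketch of the same uniquification strategy (the names are hypothetical):

node_names = {'conv', 'conv_1', 'relu'}

def unique_name(base: str, idx: int, taken: set) -> str:
    # keep appending '_<idx>' until the candidate is free
    candidate = base + '_' + str(idx)
    while candidate in taken:
        candidate += '_' + str(idx)
    taken.add(candidate)
    return candidate

assert unique_name('conv', 1, node_names) == 'conv_1_1'  # 'conv_1' is already taken
assert unique_name('relu', 2, node_names) == 'relu_2'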
Example #5
    def div_to_mul_replacement(div: Node):
        # this transformation is executed for the V10 IR later, in the middle phase, regardless of graph_condition,
        # so we prevent replacing Div on shape-calculating sub-graphs
        if div.in_port(0).data.get_value() is not None and div.in_port(1).data.get_value() is not None:
            return

        # cannot replace Div with Mul when the divisor is an integer, because its reciprocal computed in integer arithmetic would be 0
        value = div.in_port(1).data.get_value()
        if value is not None and type(value.item(0)) == int:
            return

        graph = div.graph
        name = div.soft_get('name', div.id)

        # keep Mul name the same as Div -- because of mathematical equality of output tensors
        rename_node(node=div, name=name + '/to_be_removed')

        # reconnect Div in(out)puts to Mul
        mul = Mul(graph, {'name': name}).create_node()
        rename_node(mul, name)

        div.in_port(0).get_connection().set_destination(mul.in_port(0))
        div.in_port(1).get_connection().set_destination(mul.in_port(1))
        div.out_port(0).get_connection().set_source(mul.out_port(0))

        # restore mathematical equivalence to Div operation: Div(A, B) = Mul(A, Pow(B, -1))
        reciprocal = create_op_with_const_inputs(graph, Pow, {1: np.float64(-1)}, {'name': name + '/reciprocal_'})
        mul.in_port(1).get_connection().insert_node(reciprocal)
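As in the Sub case, the equivalence can be checked numerically; the early return above exists because a reciprocal computed in integer arithmetic collapses to 0 (the values are illustrative):

import numpy as np

a = np.array([3.0, -1.5, 2.0], dtype=np.float32)
b = np.array([2.0, 0.5, -4.0], dtype=np.float32)
# Div(A, B) == Mul(A, Pow(B, -1)) for floating-point divisors
assert np.allclose(a / b, a * np.power(b, np.float64(-1)))
# for an integer divisor the reciprocal is not representable: 1 // 4 == 0
assert 1 // 4 == 0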
Example #6
 def _add_models_prefix(self):
     """Adds model name prefix to node names"""
     if not self._prefix_is_applied:
         self._prefix_is_applied = True
         for model_dict in self._models:
             model_name, model = model_dict['name'], model_dict['model']
             for node in ge.get_all_operation_nodes(model,
                                                    recursively=False):
                 rename_node(node, f'{model_name}_{node.name}')
Example #7
 def _restore_models_prefix(self):
     """Restores removed model name prefix in node name"""
     if self._cache.node_names:
         self._prefix_is_applied = True
         for model_dict in self._models:
             model_name, model = model_dict['name'], model_dict['model']
             for node in ge.get_all_operation_nodes(model, recursively=False):
                 if node.name in self._cache.node_names[model_name]:
                     rename_node(node, f'{model_name}_{node.name}')
         self._cache.pop('node_names')
Example #8
 def _remove_models_prefix(self):
     """Removes model name prefix from node names"""
     if self._prefix_is_applied:
         self._prefix_is_applied = False
         for model_dict in self._models:
             model_name, model = model_dict['name'], model_dict['model']
             self._cache.node_names[model_name] = []
             for node in ge.get_all_operation_nodes(model, recursively=False):
                 if node.name.startswith(model_name):
                     rename_node(node, node.name.replace(model_name + '_', '', 1))
                     self._cache.node_names[model_name].append(node.name)
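The three methods above only manipulate a string prefix; the round trip can be sketched without the graph API (the model and node names are hypothetical):

model_name = 'resnet50'
original = 'conv1/WithoutBiases'

prefixed = f'{model_name}_{original}'                 # what _add_models_prefix produces
restored = prefixed.replace(model_name + '_', '', 1)  # what _remove_models_prefix undoes
assert prefixed == 'resnet50_conv1/WithoutBiases'
assert restored == original                           # count=1 strips only the leading prefix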
Example #9
    def find_and_replace_pattern(self, graph: Graph):
        reverse_nodes = graph.get_op_nodes(op='Reverse')
        for reverse in reverse_nodes:
            reverse_name = reverse.soft_get('name', reverse.id)

            assert reverse.in_port(1).disconnected()
            assert reverse.has_valid('axis')

            in_shape_rank = len(reverse.in_port(0).data.get_shape())
            # 1. Add new dimension as batch for rank = 1 to have batch != seq_axis
            if in_shape_rank == 1:
                unsq_node = create_op_node_with_second_input(graph, Unsqueeze, int64_array([0]),
                                                             {'name': reverse_name+"/Unsqueeze"})
                reverse.in_port(0).get_source().connect(unsq_node.in_port(0))
                new_in = unsq_node.out_port(0)
                batch_axis = 0
                seq_axis = 1
            else:
                new_in = reverse.in_port(0).get_source()
                seq_axis = reverse['axis']
                batch_axis = 0 if seq_axis != 0 else 1

            # 2. For ReverseSequence, input port 1 is seq_lengths => create this input node as
            # shape[seq_axis] broadcast to shape[batch_axis]
            # in ---> ShapeOf ----> Gather(seq_axis)  ----> Broadcast----->
            #            |                                      |
            #            | -------> Gather(batch_axis)----------|
            shape_node = Shape(graph, {'name': reverse_name + "/Shape"}).create_node()
            new_in.connect(shape_node.in_port(0))
            seq_axis_node = node_to_get_shape_value_of_indices(shape_node, [seq_axis])
            batch_node = node_to_get_shape_value_of_indices(shape_node, [batch_axis])
            broadcast_node = Broadcast(graph, {'name': reverse_name + "/Broadcast"}).create_node()
            broadcast_node.in_port(0).connect(seq_axis_node.out_port(0))
            broadcast_node.in_port(1).connect(batch_node.out_port(0))

            # 3. Create new ReverseSequence node and reconnect all inputs/outputs to it
            rename_node(reverse, reverse_name + '/to_delete')
            reverse_sequence = ReverseSequence(graph, {'name':  reverse_name, 'seq_axis': seq_axis,
                                                       'batch_axis': batch_axis}).create_node()
            reverse_sequence.in_port(0).connect(new_in)
            reverse_sequence.in_port(1).connect(broadcast_node.out_port(0))

            # 4. remove added dimension for rank = 1
            if in_shape_rank == 1:
                rename_node(reverse_sequence, reverse_name + '/ReverseSequence')
                squeeze_node = create_op_node_with_second_input(graph, Squeeze, int64_array([0]),
                                                                {'name': reverse_name})
                squeeze_node.in_port(0).connect(reverse_sequence.out_port(0))
                reverse.out_port(0).get_connection().set_source(squeeze_node.out_port(0))
            else:
                reverse.out_port(0).get_connection().set_source(reverse_sequence.out_port(0))

        # 5. Delete old Reverse node
        graph.remove_nodes_from([reverse.id for reverse in reverse_nodes])
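The replacement works because reversing a whole axis is the special case of ReverseSequence in which every sequence length equals the full size of that axis; a NumPy sketch with illustrative shapes:

import numpy as np

x = np.arange(2 * 5).reshape(2, 5)             # batch_axis = 0, seq_axis = 1
seq_lengths = np.full(x.shape[0], x.shape[1])  # shape[seq_axis] broadcast to shape[batch_axis]

reversed_axis = np.flip(x, axis=1)             # what the original Reverse computes
reverse_sequence = np.stack([row[:n][::-1] for row, n in zip(x, seq_lengths)])
assert np.array_equal(reversed_axis, reverse_sequence)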
Example #10
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_op_nodes(op='ATen', operator='embedding_bag'):
            assert node.soft_get('mode') == 0, 'ATen::embedding_bag has unsupported mode, only "sum" ' \
                                               'mode is supported for node {}.'.format(node.id)
            node_name = node.soft_get('name', node.id)
            rename_node(node, node_name + '/TBR')
            is_packed = False
            if len(node.in_ports()) < 3 or node.in_port(2).disconnected():
                is_packed = True
                embedding_bag = EmbeddingBagPackedSum(graph, {'name': node_name}).create_node()
            else:
                embedding_bag = EmbeddingBagOffsetsSum(graph, {'name': node_name}).create_node()
                node.in_port(2).get_connection().set_destination(embedding_bag.in_port(2))
            rename_node(embedding_bag, node_name)
            node.in_port(0).get_connection().set_destination(embedding_bag.in_port(0))
            node.in_port(1).get_connection().set_destination(embedding_bag.in_port(1))
            node.out_port(0).get_connection().set_source(embedding_bag.out_port(0))
            if len(node.in_ports()) == 4 and not node.in_port(3).disconnected():
                if is_packed:
                    node.in_port(3).get_connection().set_destination(embedding_bag.in_port(2))
                else:
                    # connect per_sample_weights
                    node.in_port(3).get_connection().set_destination(embedding_bag.in_port(4))

                    weights_shape_node = Shape(graph, {'name': node_name + '/WeightsShape'}).create_node()

                    weights_rank_node = Rank(graph, {'name': node_name + '/WeightsRank'}).create_node()
                    last_dim_node = get_canonical_axis_index_node(weights_rank_node, -1)
                    weights_last_dim = get_shape_values_by_indices_node(weights_shape_node, last_dim_node)

                    weights_first_dim = node_to_get_shape_value_of_indices(weights_shape_node, [0])

                    zero_col_node = create_op_with_const_inputs(graph, Broadcast, {0: int64_array([0])},
                                                                {'name': node_name + '/Broadcast'})
                    zero_col_node.in_port(1).connect(weights_last_dim.out_port(0))

                    default_embeddings_node = create_op_with_const_inputs(graph, Unsqueeze, {1: int64_array(0)},
                                                                          {'name': node_name + '/Unsqueeze'})
                    default_embeddings_node.in_port(0).connect(zero_col_node.out_port(0))

                    # expand embedding table with zeros
                    weights_concat = Concat(graph, {'axis': 0, 'in_ports_count': 2,
                                                    'name': node_name + '/Concat'}).create_node()
                    embedding_bag.in_port(0).get_connection().set_destination(weights_concat.in_port(0))
                    weights_concat.in_port(0).get_connection().add_destination(weights_shape_node.in_port(0))
                    weights_concat.in_port(0).get_connection().add_destination(weights_rank_node.in_port(0))
                    weights_concat.in_port(1).connect(default_embeddings_node.out_port(0))
                    weights_concat.out_port(0).connect(embedding_bag.in_port(0))

                    # point default index to expanded part of embedding table
                    weights_first_dim.out_port(0).connect(embedding_bag.in_port(3))
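The per_sample_weights branch above boils down to appending one all-zero row to the embedding table and pointing default_index at it; a NumPy sketch of the idea with a made-up table:

import numpy as np

weights = np.random.rand(10, 4).astype(np.float32)               # [num_embeddings, emb_dim]
zero_row = np.broadcast_to(np.float32(0.0), weights.shape[-1:])  # Broadcast(0, last_dim)
default_embeddings = np.expand_dims(zero_row, 0)                 # Unsqueeze(axis=0) -> [1, emb_dim]
extended = np.concatenate([weights, default_embeddings], axis=0)  # Concat(axis=0)

default_index = weights.shape[0]  # first dim of the original table points at the zero row
assert np.all(extended[default_index] == 0.0)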
Example #11
    def full_fq_noise_stats(self, model):
        fully_quantized_model = deepcopy(model)
        model = self.get_nonquantized_model(model)
        for node in mu.get_all_operation_nodes(fully_quantized_model):
            rename_node(node, node.name + self.q_suffix)
            node.fullname += self.q_suffix

        composite_model = get_composite_model(model,
                                              fully_quantized_model,
                                              quantized_suffix=self.q_suffix)

        # collect convolution output residuals for original vs. quantized model
        inputs_outputs_layout = {}
        stat_calculation_layers = {}

        conv_nodes = mu.get_nodes_by_type(model, ['Convolution'])
        sorted_conv_nodes = [
            node for node in model.pseudo_topological_sort()
            if node in conv_nodes
        ]
        for conv in sorted_conv_nodes:
            add_after_conv = nu.get_node_output(conv, 0)[0]
            if add_after_conv.type == 'Add':
                # needs special layout for input/output stats
                stat_calculation_layers.update(
                    {add_after_conv.fullname: conv.fullname})
                inputs_outputs_layout[add_after_conv.fullname] = {
                    'layerwise_stat':
                    SQNRStatistic(self.activation_stats, self.q_suffix)
                }
                inputs_outputs_layout[add_after_conv.fullname +
                                      self.q_suffix] = {}

        del model, fully_quantized_model
        self._engine.set_model(composite_model)
        _, accumulated_stats = self._engine.predict(
            stats_layout=inputs_outputs_layout,
            sampler=IndexSampler(range(self._config['stat_subset_size'])))
        qnoise_values = [
            self.mean_sample_estimator(
                accumulated_stats[layer]['layerwise_stat'])
            for layer in stat_calculation_layers
        ]
        noise_data = {
            'noise_metric': qnoise_values,
            'layer_name': list(stat_calculation_layers.values()),
        }
        if 'results_dump_filename' in self._config:
            pd.DataFrame(noise_data).to_csv(
                self._config['results_dump_filename'])
        return noise_data
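For reference, the textbook signal-to-quantization-noise ratio that this kind of layer-wise comparison is built around, written with NumPy on synthetic data (the exact statistic computed by SQNRStatistic may differ):

import numpy as np

def sqnr_db(reference: np.ndarray, quantized: np.ndarray) -> float:
    # 10 * log10(signal power / quantization-noise power)
    noise = reference - quantized
    return 10.0 * float(np.log10(np.mean(reference ** 2) / np.mean(noise ** 2)))

ref = np.random.randn(1000).astype(np.float32)
quant = ref + np.float32(0.01) * np.random.randn(1000).astype(np.float32)
print(round(sqnr_db(ref, quant), 1))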
Example #12
    def replace_op(self, graph: Graph, node: Node):
        name = node.soft_get('name', node.id)
        axis = node.soft_get('axis', 0)

        rename_node(node=node, name=name + '/to_be_removed')
        cumsum_node = create_op_node_with_second_input(graph, CumSum,
                                                       int64_array(axis), {
                                                           'name': name,
                                                           'reverse': False,
                                                           'exclusive': False
                                                       })
        rename_node(cumsum_node, name)

        node.in_port(0).get_connection().set_destination(
            cumsum_node.in_port(0))
        if node.has_valid('mx_out_type') and node['mx_out_type'] is not None:
            rename_node(node=cumsum_node, name=name + '/CumSum')
            convert = Cast(graph, {
                'name': name,
                'dst_type': node['mx_out_type']
            }).create_node()
            rename_node(convert, name)
            cumsum_node.out_port(0).connect(convert.in_port(0))
            return [convert.id]
        else:
            return [cumsum_node.id]
Example #13
    def floor_div_replacement(floor_div: Node):
        graph = floor_div.graph
        name = floor_div.soft_get('name', floor_div.id)

        div = Div(graph, {'name': name + '/Div'}).create_node()
        floor = Floor(graph, {'name': name}).create_node()
        div.out_port(0).connect(floor.in_port(0))

        div.in_port(0).connect(floor_div.in_port(0).get_source())
        div.in_port(1).connect(floor_div.in_port(1).get_source())
        floor_div.out_port(0).get_connection().set_source(floor.out_port(0))

        graph.remove_node(floor_div.id)
        rename_node(floor, name)
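A quick check of the decomposition FloorDiv(A, B) = Floor(Div(A, B)), including negative operands (the values are illustrative):

import numpy as np

a = np.array([7.0, -7.0, 5.0], dtype=np.float32)
b = np.array([2.0, 2.0, -3.0], dtype=np.float32)
assert np.array_equal(np.floor(a / b), a // b)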
Example #14
    def replace_pattern(graph: Graph, match: dict):
        node = match['normalize']

        # rename the Normalize node since it will no longer be the output node after the transformation
        output_name = node.soft_get('name', node.id)
        normalizel2_name = output_name + '/normalizel2'
        rename_node(node, normalizel2_name)

        assert node.in_port(0).data.get_shape().size in [2, 3, 4]
        assert node.has_valid('across_spatial')
        assert node.has_valid('channel_shared')
        assert node.has_valid('eps')

        if 'bin' in node.in_edge(1):
            del node.in_edge(1)['bin']

        weights = node.in_port(1).data.get_value()
        assert weights is not None
        # in the code below we intentionally use get_source() to get the out port, because updating the out port
        # also updates the Const node 'value' and 'shape' attributes
        if node.channel_shared or all(weights == weights[0]):
            node.in_port(1).get_source().data.set_value(np.array([weights[0]]))
        else:
            new_shape = np.ones((len(node.in_port(0).data.get_shape())),
                                dtype=np.int64)
            new_shape[1] = -1
            node.in_port(1).get_source().data.set_value(
                np.array(weights).reshape(new_shape))

        mul = Mul(graph, {'name': output_name}).create_node()
        rename_node(mul, output_name)

        if not node.across_spatial:
            axes = int64_array([1])
        else:
            axes = int64_array(
                np.arange(start=1, stop=node.in_port(0).data.get_shape().size))

        normalizel2 = create_op_with_const_inputs(graph, NormalizeL2Op,
                                                  {1: axes}, {
                                                      'eps_mode': 'add',
                                                      'eps': node.eps
                                                  })

        node.out_port(0).get_connection().set_source(mul.out_port(0))
        node.in_port(1).get_connection().get_source().connect(mul.in_port(1))
        normalizel2.out_port(0).connect(mul.in_port(0))
        node.in_port(0).get_connection().set_destination(
            normalizel2.in_port(0))
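The reshape of the weights Const to new_shape explains itself best numerically: per-channel weights must broadcast against an NCHW tensor when they are multiplied in after NormalizeL2. A NumPy sketch with illustrative shapes:

import numpy as np

x = np.ones((2, 3, 4, 4), dtype=np.float32)            # NCHW input
weights = np.array([0.5, 1.0, 2.0], dtype=np.float32)  # one value per channel
new_shape = np.ones(x.ndim, dtype=np.int64)
new_shape[1] = -1                                      # -> [1, -1, 1, 1]
scaled = x * weights.reshape(new_shape)
assert scaled.shape == x.shape
assert np.allclose(scaled[:, 2], 2.0)                  # channel 2 scaled by its weight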
Example #15
        def change_names(_, match):
            fq_node = match['fq']
            input_node = get_node_input(fq_node, 0)
            new_fq_name = copy(input_node.name)
            if 'orig_node_name' in input_node:
                new_fq_name = copy(input_node['orig_node_name'])

            input_node_outputs = get_all_node_outputs(input_node)
            if len(input_node_outputs) > 1 and all([op.type == 'FakeQuantize' for op in input_node_outputs]):
                new_fq_name += '.{}'.format(fq_node.in_port(0).get_source().idx)

            fq_node['orig_fq_name'] = copy(fq_node.name)

            if 'orig_node_name' not in input_node:
                input_node['orig_node_name'] = copy(input_node.name)
                rename_node(input_node, f'{input_node.name}/pre_fq_input')
            rename_node(fq_node, new_fq_name)
Example #16
    def find_and_replace_pattern(self, graph: Graph):
        for attr_clamp in graph.get_op_nodes(op='AttributedClamp'):
            original_name = attr_clamp.soft_get('name', attr_clamp.id)

            rename_node(attr_clamp, original_name + '/TBR')
            min_value = attr_clamp.soft_get('min', np.finfo(np.float32).min)
            max_value = attr_clamp.soft_get('max', np.finfo(np.float32).max)
            new_clamp = create_op_with_const_inputs(graph, Clamp, {
                1: float32_array(min_value),
                2: float32_array(max_value)
            }, {'name': original_name})
            rename_node(new_clamp, original_name)

            attr_clamp.in_port(0).get_connection().set_destination(
                new_clamp.in_port(0))
            attr_clamp.out_port(0).get_connection().set_source(
                new_clamp.out_port(0))
            graph.remove_node(attr_clamp.id)
Example #17
    def replace_pattern(self, graph: Graph, match: dict):
        gather = match['GatherND']
        gather_name = gather.soft_get('name', gather.id)
        input_shape = gather.in_node(0).shape
        indices = gather.in_node(1).value
        if indices is None:
            # this specialized replacement cannot be performed without a constant indices value
            return

        # 0. Check that GatherND can be replaced by Gather
        gather_idx = self.indices_check(indices, input_shape)
        if gather_idx is None:
            log.warning(
                'Node {} with op=GatherND can\'t be normalized to op=Gather.'.
                format(gather_name))
            return

        # 1. Add Reshape and connect
        new_shape = int64_array([-1] + list(input_shape[indices.shape[-1]:]))
        reshape = create_op_node_with_second_input(
            graph, Reshape, new_shape,
            {'name': gather_name + '/Reshape_for_GatherND/'})
        gather.in_port(0).get_connection().set_destination(reshape.in_port(0))

        # 2. Change indices from Nd to 1d:
        new_indices = np.reshape(
            np.take(indices, indices=[gather_idx], axis=-1), [-1])

        rename_node(gather, gather_name + '/to_delete')

        # 3. Create new Gather operation and reconnect all inputs/outputs
        new_gather = create_op_with_const_inputs(graph, Gather, {
            1: new_indices,
            2: int64_array(0)
        }, {'name': gather_name})
        rename_node(new_gather, gather_name)

        reshape.out_port(0).connect(new_gather.in_port(0))

        gather.out_port(0).get_connection().set_source(new_gather.out_port(0))

        # 4. Remove old Gather node
        graph.remove_node(gather.id)
Example #18
    def replace_pattern(graph: Graph, match: dict):
        relu = match['leakyrelu']
        relu_name = relu.soft_get('name', relu.id)
        if not relu.has_valid('negative_slope'):
            return

        rename_node(relu, relu_name + '/to_delete')
        # Create PReLU op and reconnect input/output from LeakyReLU to PReLU
        prelu = PReLU(graph, dict(name=relu_name)).create_node()
        rename_node(prelu, relu_name)

        const = Const(
            graph,
            dict(name=relu_name + "/weights",
                 value=mo_array([relu.negative_slope]))).create_node()

        relu.in_port(0).get_connection().set_destination(prelu.in_port(0))
        const.out_port(0).connect(prelu.in_port(1))
        relu.out_port(0).get_connection().set_source(prelu.out_port(0))
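The rewrite works because PReLU with a single shared slope computes exactly the LeakyReLU function; a NumPy check with an illustrative slope:

import numpy as np

x = np.array([-2.0, -0.5, 0.0, 3.0], dtype=np.float32)
negative_slope = np.float32(0.1)

leaky_relu = np.where(x >= 0, x, negative_slope * x)
prelu = np.maximum(x, 0) + np.array([negative_slope]) * np.minimum(x, 0)
assert np.allclose(leaky_relu, prelu)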
Example #19
    def find_and_replace_pattern(self, graph: Graph):
        for tfpad in graph.get_op_nodes(op='TFPad'):
            # save the original node name to use it in the new Pad op instance
            original_name = tfpad.soft_get('name', tfpad.id)
            tfpad['name'] = original_name + '/to_be_removed'

            new_pad = Pad(graph, {
                'mode': tfpad.soft_get('mode', None),
            }).create_node()
            rename_node(new_pad, original_name)

            tfpad.in_port(0).get_connection().set_destination(
                new_pad.in_port(0))

            if tfpad.soft_get('mode') == 'constant':
                # the input with fill value is an optional third input in TF
                if not tfpad.in_port(2).disconnected():
                    tfpad.in_port(2).get_connection().set_destination(
                        new_pad.in_port(3))

            # convert TF representation of the pads as [N, 2] to MO representation: [N] and [N]
            transposed_pads = create_op_with_const_inputs(
                graph, Transpose, {1: int64_array([1, 0])})
            tfpad.in_port(1).get_connection().set_destination(
                transposed_pads.in_port(0))
            split_pads = create_op_with_const_inputs(graph, Split,
                                                     {1: int64_array(0)},
                                                     {'num_splits': 2})
            transposed_pads.out_port(0).connect(split_pads.in_port(0))
            for port_ind in range(2):
                split_pads.add_output_port(port_ind, skip_if_exist=True)
                new_pad.in_port(port_ind + 1).connect(
                    split_pads.out_port(port_ind))
                new_pad.in_port(port_ind + 1).get_connection().insert_node(
                    create_op_with_const_inputs(graph, Squeeze,
                                                {1: int64_array([0])}))

            tfpad.out_port(0).get_connection().set_source(new_pad.out_port(0))
            graph.remove_node(tfpad.id)
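The Transpose + Split + Squeeze chain above converts TF's [N, 2] pads layout (begin/end per axis) into the two [N] vectors Pad expects; a NumPy sketch with illustrative values:

import numpy as np

tf_pads = np.array([[1, 3], [2, 4], [0, 0]], dtype=np.int64)  # [N, 2]
transposed = tf_pads.transpose([1, 0])                        # [2, N]
pads_begin, pads_end = np.split(transposed, 2, axis=0)
pads_begin, pads_end = np.squeeze(pads_begin, 0), np.squeeze(pads_end, 0)  # the inserted Squeeze ops
assert np.array_equal(pads_begin, [1, 2, 0])
assert np.array_equal(pads_end, [3, 4, 0])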
Example #20
def insert_experimental_layers(graph: Graph, input_fpn_heads: list, inp: str,
                               out: str):
    old_output_node = Node(graph, out)
    output_name = old_output_node.soft_get('name', old_output_node.id)
    old_output_node_name = output_name + '/old'
    rename_node(old_output_node, old_output_node_name)

    input_fpn_head_nodes = [
        Node(graph, node_id) for node_id in input_fpn_heads
    ]
    fpn_roi_align = ExperimentalDetectronROIFeatureExtractor(
        graph, {
            'name': output_name,
            'output_size': 7,
            'pyramid_scales': int64_array([4, 8, 16, 32, 64]),
            'sampling_ratio': 2,
        }).create_node()
    rename_node(fpn_roi_align, output_name)
    fpn_roi_align.in_port(0).connect(Node(graph, inp).out_port(0))
    for ind, fpn_node in enumerate(input_fpn_head_nodes):
        fpn_roi_align.in_port(ind + 1).connect(fpn_node.out_port(0))

    old_output_node.out_port(0).get_connection().set_source(
        fpn_roi_align.out_port(0))
Example #21
    def replace_pattern(self, graph: Graph, match: dict):
        clamp = match['clamp']
        name = clamp.soft_get('name', clamp.id)

        min_value = max_value = None
        port_1_exist = clamp.has_port('in', 1) and not clamp.in_port(1).disconnected()
        port_2_exist = clamp.has_port('in', 2) and not clamp.in_port(2).disconnected()
        if port_1_exist and clamp.in_port(1).get_source().node.soft_get('type') == 'Const':
            min_value = clamp.in_port(1).data.get_value()
        if port_2_exist and clamp.in_port(2).get_source().node.soft_get('type') == 'Const':
            max_value = clamp.in_port(2).data.get_value()

        rename_node(clamp, name + '/TBR')
        if min_value is None or max_value is None:
            max_node = min_node = None
            if port_1_exist:
                max_node = Maximum(graph, {}).create_node()
                clamp.in_port(0).get_connection().set_destination(max_node.in_port(0))
                clamp.in_port(1).get_connection().set_destination(max_node.in_port(1))
                clamp.out_port(0).get_connection().set_source(max_node.out_port(0))
            if port_2_exist:
                min_node = Minimum(graph, {}).create_node()
                if max_node is not None:
                    max_node.out_port(0).get_connection().set_source(min_node.out_port(0))
                    max_node.out_port(0).connect(min_node.in_port(0))
                else:
                    clamp.in_port(0).get_connection().set_destination(min_node.in_port(0))
                    clamp.out_port(0).get_connection().set_source(min_node.out_port(0))
                clamp.in_port(2).get_connection().set_destination(min_node.in_port(1))
            assert min_node is not None or max_node is not None, 'Clamp node should have either min or max input used'
            rename_node(min_node if min_node is not None else max_node, name)
        else:
            a_clamp = AttributedClamp(graph, {'name': name, 'min': min_value, 'max': max_value}).create_node()
            rename_node(a_clamp, name)
            clamp.in_port(0).get_connection().set_destination(a_clamp.in_port(0))
            clamp.out_port(0).get_connection().set_source(a_clamp.out_port(0))
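The fallback branch relies on Clamp(x, min, max) == Minimum(Maximum(x, min), max); a NumPy check with illustrative bounds:

import numpy as np

x = np.array([-5.0, 0.5, 7.0], dtype=np.float32)
min_value, max_value = 0.0, 1.0
assert np.array_equal(np.clip(x, min_value, max_value),
                      np.minimum(np.maximum(x, min_value), max_value))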
Example #22
    def replace_timeheightconv(self, graph: Graph, node: Node):
        req_time_offsets = node.soft_get('time_offsets')
        offsets = node.soft_get("offsets", [[]])
        all_time_offsets = list(set(offsets[:, 0]))
        all_time_offsets.sort()
        in_name = node.soft_get('name', node.id)
        rename_node(node, in_name + '/to_delete')

        # create MemoryOffsets for context gathering;
        # a Concat is needed when there is more than one time offset
        concat = Concat(graph,
                        attrs={
                            'name': in_name + '/Concat',
                            'in_ports_count': len(all_time_offsets)
                        }).create_node()
        i = 0
        for t in all_time_offsets:
            # if the time offset is included in required_time_offsets we don't need a default value
            has_default = t not in req_time_offsets
            memoff = MemoryOffset(graph,
                                  attrs={
                                      'name': in_name + '/MemoryOffset_' + str(i),
                                      't': t,
                                      'has_default': has_default,
                                      'splitted': False,
                                      'pair_name': in_name + '/MemoryOffset_pair_' + str(i)
                                  }).create_node()
            concat.in_port(i).connect(memoff.out_port(0))
            memoff.in_port(0).connect(node.in_port(0).get_source())
            i = i + 1

        stride = node.soft_get("height_subsample", 1)

        kernel = int64_array([0, 0])
        kernel[0] = len(set(offsets[:, 0]))
        kernel[1] = len(set(offsets[:, 1]))

        pad_h = int64_array([0, 0])
        pad_h[0] = -min(offsets[:, 1]) if min(offsets[:, 1]) < 0 else 0
        pad_h[1] = stride * node.height_out - (node.height_in -
                                               max([max(offsets[:, 1]), 0]))

        dilation_t = (max(offsets[:, 0]) - min(offsets[:, 0])) / (
            kernel[0] - 1) if kernel[0] > 1 else 1
        dilation_h = (max(offsets[:, 1]) - min(offsets[:, 1])) / (
            kernel[1] - 1) if kernel[1] > 1 else 1

        conv_attrs = {
            'name': in_name,
            'output': node['out_channels'],
            'height_in': node.height_in,
            'bias_term': None,
            'pad': int64_array([[0, 0], [0, 0], [0, 0], pad_h]),
            'pad_spatial_shape': int64_array([[0, 0], pad_h]),
            'dilation': int64_array([1, 1, dilation_t, dilation_h]),
            'kernel': int64_array([node.out_channels, node.in_channels, kernel[0], kernel[1]]),
            'stride': int64_array([1, 1, 1, stride]),
            'kernel_spatial': kernel,
            'input_feature_channel': 1,
            'output_feature_channel': 0,
            'channel_dims': int64_array([1]),
            'spatial_dims': int64_array([2, 3]),
            'batch_dims': int64_array([0]),
            'kernel_spatial_idx': int64_array([2, 3]),
            'group': 1,
            'reshape_kernel': True,
            'bias_addable': True,
        }
        conv = Convolution(graph, attrs=conv_attrs).create_node()
        conv.in_port(0).connect(concat.out_port(0))
        conv.in_port(1).connect(node.in_port(1).get_source())

        # change the weights layout from OHWI to OIHW;
        # in the future this should be replaced by the common Permute mechanics
        weights = conv.in_port(1).get_source().node.value
        weights = weights.reshape(
            int64_array([node.out_channels, -1, node.in_channels]))
        weights = weights.transpose(int64_array([0, 2, 1]))
        weights = weights.flatten()
        conv.in_port(1).get_source().node.value = weights

        conv.in_port(2).connect(node.in_port(2).get_source())
        node.out_port(0).get_connection().set_source(conv.out_port(0))
        graph.remove_node(node.id)
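The weight relayout near the end of the function is plain array manipulation and can be checked in isolation with NumPy (the kernel sizes are illustrative):

import numpy as np

out_channels, in_channels, kh, kw = 4, 3, 2, 5
ohwi = np.random.rand(out_channels, kh, kw, in_channels).astype(np.float32)

flat = ohwi.flatten()
relaid = flat.reshape([out_channels, -1, in_channels]).transpose([0, 2, 1]).flatten()
oihw = ohwi.transpose([0, 3, 1, 2])  # the intended OIHW layout
assert np.array_equal(relaid, oihw.flatten())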
Example #23
    def mxrepeat_decomposition(node: Node):
        graph = node.graph
        name = node.soft_get('name', node.id)

        rename_node(node, name + '/to_be_removed')

        # Unsqueeze
        input_rank = Rank(graph, {'name': name + '/Rank'}).create_node()
        node.in_port(0).get_source().connect(input_rank.in_port(0))

        axis = get_canonical_axis_index_node(input_rank, node.axis)
        unsqueeze_axis = create_op_node_with_second_input(
            graph,
            Add,
            int64_array([1]), {'name': name + '/Unsqueeze/Axis'},
            input_node=axis)

        unsqueeze = Unsqueeze(graph, {
            'name': name + '/Unsqueeze'
        }).create_node()
        unsqueeze.in_port(1).connect(unsqueeze_axis.out_port(0))

        # Tile (1, 1, ..., repeats, ..., 1)
        # we generate tile array according to the following table:

        # parts:       |      first      |  repeats |  second     |
        # i:           | 0, 1, ..., axis,| axis + 1,| ..., rank+1 |
        # tile_array:  | 1, 1, ...,  1  ,| repeats ,| ...,   1    |

        one = Const(graph, {
            'name': name + '/Broadcast/One',
            'value': int64_array([1])
        }).create_node()
        first_ones = Broadcast(graph, {
            'name': name + '/Broadcast/Ones_first_part'
        }).create_node()
        first_ones.in_port(0).connect(one.out_port(0))
        first_ones.in_port(1).connect(unsqueeze_axis.out_port(0))

        repeats = Const(graph, {
            'name': name + '/repeats',
            'value': int64_array([node.repeats])
        }).create_node()

        second_ones = Broadcast(graph, {
            'name': name + '/Broadcast/Ones_second_part'
        }).create_node()
        second_part_broadcast_shape = Sub(
            graph, {
                'name': name + '/Broadcast/Shape/second_part'
            }).create_node()
        second_part_broadcast_shape.in_port(0).connect(input_rank.out_port(0))
        second_part_broadcast_shape.in_port(1).connect(
            unsqueeze_axis.out_port(0))
        second_ones.in_port(0).connect(one.out_port(0))
        second_ones.in_port(1).connect(second_part_broadcast_shape.out_port(0))

        tile_repeats = new_shape_node_from_shape_nodes(
            [first_ones, repeats, second_ones])
        tile = Tile(graph, {'name': name + '/Tile'}).create_node()
        tile.in_port(1).connect(tile_repeats.out_port(0))

        # Reshape (input_shape[:axis], input_shape[axis] * repeats, input_shape[axis+1:])
        # we generate reshape dim array according to the following table:

        # parts:       |    first   |                rep           |  second   |
        # i:           | 0, 1, ... ,|               axis,          | ..., rank |
        # dim_array:   | inp_sh[i] ,| input_shape[axis] * repeats ,| inp_sh[i] |

        input_shape = Shape(graph, {'name': name + '/Shape'}).create_node()
        node.in_port(0).get_source().connect(input_shape.in_port(0))

        first_input_shape_part = get_shape_values_by_range_idxs(
            input_shape,
            input_rank,
            begin=0,
            end=node.axis,
            include_begin=True,
            include_end=False)

        original_axis_dim = create_op_with_const_inputs(
            graph,
            Gather, {2: int64_array(0)}, {'name': name + '/OriginalDim'},
            input_node=input_shape)
        original_axis_dim.in_port(1).connect(axis.out_port(0))

        repeated_dimension = Mul(graph, {
            'name': name + '/RepeatedDim'
        }).create_node()
        repeated_dimension.in_port(0).connect(original_axis_dim.out_port(0))
        repeated_dimension.in_port(1).connect(repeats.out_port(0))

        second_input_shape_part = get_shape_values_by_range_idxs(
            input_shape,
            input_rank,
            begin=node.axis,
            end=-1,
            include_begin=False,
            include_end=True)

        output_shape = new_shape_node_from_shape_nodes([
            first_input_shape_part, repeated_dimension, second_input_shape_part
        ])

        reshape = Reshape(graph, {'name': name}).create_node()
        rename_node(reshape, name)
        reshape.in_port(1).connect(output_shape.out_port(0))

        # Final connections
        node.in_port(0).get_connection().set_destination(unsqueeze.in_port(0))
        tile.in_port(0).connect(unsqueeze.out_port(0))
        reshape.in_port(0).connect(tile.out_port(0))
        node.out_port(0).get_connection().set_source(reshape.out_port(0))
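The whole Unsqueeze/Tile/Reshape pipeline is equivalent to numpy.repeat along the chosen axis; a check with an illustrative 2-D input:

import numpy as np

x = np.arange(2 * 3).reshape(2, 3)
axis, repeats = 1, 2

unsqueezed = np.expand_dims(x, axis + 1)                   # (2, 3, 1)
tiled = np.tile(unsqueezed, [1] * (axis + 1) + [repeats])  # (2, 3, repeats)
reshaped = tiled.reshape(x.shape[0], x.shape[axis] * repeats)
assert np.array_equal(reshaped, np.repeat(x, repeats, axis=axis))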
Example #24
    def layerwise_fq_noise(self, model):
        fully_quantized_model = deepcopy(model)
        model = self.get_nonquantized_model(model)

        def get_single_fq_model(model, fq_node):
            fq_remover = RemoveFakeQuantize()
            fq_cut_node_list = fq_remover.find_fq_nodes_to_cut(fq_node)
            cut_fqs = []
            fq_names = [
                node.fullname
                for node in mu.get_nodes_by_type(model, ['FakeQuantize'])
            ]
            for node_name in fq_names:
                if node_name not in cut_fqs and node_name not in fq_cut_node_list:
                    model, cut_fq_layers, _ = self._graph_transformer.remove_fq_nodes(
                        model, [node_name])
                    cut_fqs += cut_fq_layers
            return model

        qnoise_values = []
        node_names = []
        conv_nodes = mu.get_nodes_by_type(fully_quantized_model,
                                          ['Convolution'])
        sorted_conv_nodes = [
            node for node in fully_quantized_model.pseudo_topological_sort()
            if node in conv_nodes
        ]
        for conv_node in sorted_conv_nodes:
            conv_input_node = nu.get_node_input(conv_node, 0)
            add_after_conv = nu.get_node_output(conv_node, 0)[0]
            if conv_input_node.type == 'FakeQuantize' and add_after_conv.type == 'Add':
                logger.info(
                    'Calculating stats for quantized convolution {}'.format(
                        conv_node.fullname))
                single_fq_layer_model = get_single_fq_model(
                    deepcopy(fully_quantized_model), conv_input_node)

                for node in mu.get_all_operation_nodes(single_fq_layer_model):
                    rename_node(node, node.name + self.q_suffix)
                    node.fullname += self.q_suffix

                composite_model = get_composite_model(deepcopy(model),
                                                      single_fq_layer_model)

                # collect convolution output residuals for original vs. quantized model
                inputs_outputs_layout = {}
                add_after_conv = nu.get_node_output(
                    mu.get_node_by_name(composite_model, conv_node.fullname),
                    0)[0]
                # needs special layout for input/output stats
                inputs_outputs_layout[add_after_conv.fullname] = {
                    'layerwise_stat':
                    SQNRStatistic(self.activation_stats, self.q_suffix)
                }
                inputs_outputs_layout[add_after_conv.fullname +
                                      self.q_suffix] = {}

                self._engine.set_model(composite_model)
                _, accumulated_stats = self._engine.predict(
                    stats_layout=inputs_outputs_layout,
                    sampler=IndexSampler(
                        range(self._config['stat_subset_size'])))
                qnoise_values.append(
                    self.mean_estimator(accumulated_stats[
                        add_after_conv.fullname]['layerwise_stat']))
                node_names.append(conv_node.fullname)

        noise_data = {'noise_metric': qnoise_values, 'layer_name': node_names}
        if 'results_dump_filename' in self._config:
            pd.DataFrame(noise_data).to_csv(
                self._config['results_dump_filename'])
        return noise_data