Example #1
def _insert_pooling(graph: Graph, first_node: Node, second_node: Node,
                    spatial_dims):
    """
    Inserts a point-wise (1x1 window) pooling layer between two nodes so that
    the propagated stride can be applied.
    """
    log.debug("STRIDE PROP: Insert pooling between {} and {}".format(
        first_node.name, second_node.name))
    stride_prop = second_node.stride_prop
    assert len(graph.get_edge_data(first_node.id, second_node.id)) == 1
    eattrs = graph.get_edge_data(first_node.id, second_node.id)[0]
    graph.remove_edge(first_node.id, second_node.id)

    pooling = Pooling(
        graph,
        dict(name='Pooling_',
             spatial_dims=spatial_dims,
             window=np.array([1, 1, 1, 1]),
             output_spatial_shape=None,
             stride=np.array(stride_prop),
             pad_spatial_shape=np.array([[0, 0], [0, 0]]),
             pad=np.array([[0, 0], [0, 0], [0, 0], [0, 0]]),
             pool_method='max',
             is_partial_inferred=False))
    pooling_data = pooling.create_node_with_data([first_node])

    _clean_fw_tensor_attrs(pooling_data)

    graph.add_edges_from([(pooling_data.id, second_node.id, eattrs)])
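
A max pooling with a 1x1 window never mixes values, so the inserted layer is pure subsampling: only the stride decides which elements survive. A minimal NumPy sketch of the equivalent computation (the shapes and stride value are illustrative, not taken from the model):

import numpy as np

# 1x1-window max pooling with stride 2 on an NCHW tensor keeps every second
# element along each spatial axis and leaves the values themselves untouched.
feature_map = np.arange(16, dtype=np.float32).reshape(1, 1, 4, 4)
stride = 2
subsampled = feature_map[:, :, ::stride, ::stride]
print(subsampled.shape)  # (1, 1, 2, 2)
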
Example #2
    def replace_op(self, graph: Graph, node: Node) -> list:
        input_node = node.in_node(0)

        input_reshape_node = Reshape(graph, {
            'name': 'Reshape/' + node.name,
            'infer': Reshape.kaldi_infer
        }).create_node([input_node])

        pooling_node = Pooling(graph, graph.node[node.id]).create_node(
            [input_reshape_node])

        output_reshape_node = Reshape(graph, {
            'name': node.name + '/Reshape/',
            'infer': Reshape.kaldi_infer
        }).create_node([pooling_node])

        return [output_reshape_node.id]
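
The Reshape nodes on both sides exist because, presumably, the surrounding Kaldi graph carries 2-D [batch, features] tensors while Pooling operates on a 4-D layout; Reshape.kaldi_infer handles the shape inference for that round trip. A rough NumPy sketch of the data movement, with all shapes and the pooling size assumed for illustration:

import numpy as np

# Assumed shapes: a 2-D [batch, features] tensor max-pooled over groups of
# pool_size consecutive features, then flattened back to 2-D.
batch, features, pool_size = 2, 8, 2
x = np.random.rand(batch, features).astype(np.float32)

x4d = x.reshape(batch, 1, features // pool_size, pool_size)  # input Reshape
pooled = x4d.max(axis=3, keepdims=True)                      # the Pooling step
y = pooled.reshape(batch, -1)                                 # output Reshape
print(y.shape)  # (2, 4)
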
Example #3
    def find_and_replace_pattern(self, graph: Graph):
        for pool_v2_node in graph.get_op_nodes(op='PoolingV2'):
            pool_v2_name = pool_v2_node.soft_get('name', pool_v2_node.id)

            pool_v1_node = Pooling(graph, {'window': pool_v2_node.in_port(1).data.get_value(),
                                           'stride': pool_v2_node.in_port(2).data.get_value(),
                                           'pad': pool_v2_node.pad,
                                           'spatial_dims': pool_v2_node.spatial_dims,
                                           'auto_pad': pool_v2_node.auto_pad,
                                           'output_spatial_shape': pool_v2_node.output_spatial_shape,
                                           'pad_spatial_shape': pool_v2_node.pad_spatial_shape,
                                           'pool_method': pool_v2_node.pool_method,
                                           'permute_attrs': pool_v2_node.permute_attrs}).create_node()

            rename_nodes([(pool_v2_node, pool_v2_name + '/to_be_removed'), (pool_v1_node, pool_v2_name)])

            pool_v2_node.in_port(0).get_connection().set_destination(pool_v1_node.in_port(0))
            pool_v2_node.out_port(0).get_connection().set_source(pool_v1_node.out_port(0))
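
Note the order of operations here: rename_nodes hands the original name to the new v1 node and tags the PoolingV2 node with a '/to_be_removed' suffix, presumably so that tensor names in the produced IR stay stable, while the window and stride that PoolingV2 receives through input ports 1 and 2 become plain attributes of the replacement operation.
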
Example #4
    def replace_pattern(self, graph: nx.MultiDiGraph, match: dict):
        node = match['reduce']
        if not node.has_valid('reduce_type') or node.reduce_type.lower() not in self.supported_reduce_types:
            log.error("Reduce type {} is not supported for node {}".format(node.soft_get('reduce_type'), node.id))
            return

        reduce_type = node.reduce_type.lower()
        if reduce_type not in self.pool_method_map:
            log.error("Reduce type {} is not included in pool_method_map. Please update pool_method_map with new key "
                      "{}".format(reduce_type, reduce_type))
            return

        input_data = node.in_node()
        output_data = node.out_node()

        input_shape = node.in_node().shape
        output_shape = node.out_node().shape

        # normalize node.axis to exclude negative indices
        node.axis = [get_canonical_axis_index(input_shape, a) for a in node.axis]

        axis = node.axis

        # Check that values in axis list are consecutive
        for idx in range(1, len(axis)):
            if axis[idx] != (axis[idx - 1] + 1):
                log.error("Reduce with not consecutive axes {} is not supported ".format(axis))
                return

        layout = graph.graph['layout']

        # So now we are sure that we can convert Reduce to an appropriate operation

        # 1. Calculate shape that will be used in reduction
        reduction_dim = np.prod([input_shape[idx] for idx in axis])
        begin_dims = np.array([input_shape[idx] for idx in range(axis[0])])
        end_dim = np.prod([input_shape[idx] for idx in range(axis[-1] + 1, len(input_shape))])

        # 2. Create reshape with appropriate shape
        if layout == 'NCHW':
            if len(begin_dims) > 2:
                begin_dims = np.array([np.prod(begin_dims[0:-1]), begin_dims[-1]], dtype=np.int64)
            else:
                # Expand begin_dims to 2
                begin_dims = np.array(np.append(begin_dims, [1] * (2 - len(begin_dims))), dtype=np.int64)
            reshape_shape = np.array([*begin_dims, reduction_dim, end_dim], dtype=np.int64)
            pool_window = np.array([1, 1, reduction_dim, 1], dtype=np.int64)
        elif layout == 'NHWC':
            begin_dims = np.prod(begin_dims)
            reshape_shape = np.array([begin_dims, reduction_dim, 1, end_dim], dtype=np.int64)
            pool_window = np.array([1, reduction_dim, 1, 1], dtype=np.int64)
        else:
            log.error('{} layout currently is not supported'.format(layout))
            return

        # 3. Reduce => Reshape->Pooling->Reshape
        reshape_op = Reshape(graph, {'name': node.id + '/Reshape', 'dim': reshape_shape})
        final_reshape_op = Reshape(graph, {'name': node.id + '/FinalReshape', 'dim': output_shape})
        pooling_op = Pooling(graph,
                             dict(name=node.id + '/Pool',
                                  window=pool_window,
                                  output_spatial_shape=None,
                                  batch_dims=np.array([get_batch_dim(layout, 4)], dtype=np.int64),
                                  channel_dims=np.array([get_features_dim(layout, 4)], dtype=np.int64),
                                  exclude_pad='false', pool_method=self.pool_method_map[reduce_type]))

        graph.remove_edge(input_data.id, node.id)
        graph.remove_edge(node.id, output_data.id)

        final_reshape_op.create_node_with_data(
            inputs=[pooling_op.create_node_with_data(
                inputs=[reshape_op.create_node_with_data(
                    inputs=[input_data]
                )]
            )],
            data_nodes=output_data)

        # 4. If it is a reduction with summation, multiply by the size of the reduction slice (a Power node with 'scale')
        if reduce_type == 'sum':
            output_data.in_node().insert_node_with_data_after(
                output_data,
                Power,
                {'name': node.name + '/Mul', 'scale': float(reduction_dim)}
            )
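
The shape arithmetic in steps 1 and 2 above is easiest to follow on a concrete case; the sketch below walks the NCHW branch with assumed shapes (a reduction over H and W):

import numpy as np

# Assumed case: NCHW input [2, 3, 4, 5], reduction over the consecutive axes [2, 3].
input_shape = [2, 3, 4, 5]
axis = [2, 3]

reduction_dim = int(np.prod([input_shape[i] for i in axis]))   # 4 * 5 = 20
begin_dims = [input_shape[i] for i in range(axis[0])]          # [2, 3]
end_dim = int(np.prod(input_shape[axis[-1] + 1:]))             # empty product -> 1

# len(begin_dims) == 2 here, so it is used as-is.
reshape_shape = [*begin_dims, reduction_dim, end_dim]          # [2, 3, 20, 1]
pool_window = [1, 1, reduction_dim, 1]                         # [1, 1, 20, 1]
print(reshape_shape, pool_window)
# The Reduce over H,W thus becomes: Reshape [2,3,4,5] -> [2,3,20,1],
# Pooling with window [1,1,20,1], then Reshape to the Reduce output shape.
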
Example #5
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['reduce']

        if node.out_port(0).data.get_value() is not None:
            # We leave Reduce* operations located in a constant sub-graph as-is
            # to keep the model reshape-able with the --keep_shape_ops CLI key
            return

        reduce_type = node.type
        if reduce_type not in self.pool_method_map:
            log.error(
                "Reduce type {} is not included in pool_method_map. Please update pool_method_map with the new "
                "key {}".format(reduce_type, reduce_type))
            return

        input_data = node.in_node()
        output_data = node.out_node()

        input_shape = node.in_port(0).data.get_shape()
        output_shape = node.out_port(0).data.get_shape()

        # normalize node axes to exclude negative indices
        axes_data_value = node.in_port(1).data.get_value()
        axes = int64_array([
            axes_data_value.item()
        ]) if axes_data_value.size == 1 else axes_data_value
        axes = [get_canonical_axis_index(input_shape, a) for a in axes]
        axes = sorted(axes)

        # Check that values in axes list are consecutive
        for idx in range(1, len(axes)):
            if axes[idx] != (axes[idx - 1] + 1):
                log.error(
                    "Reduce with non-consecutive axes {} is not supported".format(axes))
                return
        # So now we are sure that we can convert Reduce to an appropriate operation

        # 1. Calculate shape that will be used in reduction
        reduction_dim = np.prod([input_shape[idx] for idx in axes])
        begin_dims = np.array([input_shape[idx] for idx in range(axes[0])])
        end_dim = np.prod([
            input_shape[idx] for idx in range(axes[-1] + 1, len(input_shape))
        ])

        # 2. Create reshape with appropriate shape
        if len(begin_dims) > 2:
            if 0 not in axes:
                begin_dims = int64_array(
                    [begin_dims[0], np.prod(begin_dims[1:])])
            else:
                begin_dims = int64_array(
                    [np.prod(begin_dims[0:-1]), begin_dims[-1]])
        else:
            # Expand begin_dims to 2
            begin_dims = int64_array(
                np.append(begin_dims, [1] * (2 - len(begin_dims))))

        reshape_shape = int64_array([*begin_dims, reduction_dim, end_dim])
        pool_window = int64_array([1, 1, reduction_dim, 1])

        if end_dim == 1:
            new_window = ReduceReplacer.initial_reshape_dim_normalizer(
                reduction_dim)
            reshape_shape = int64_array([*begin_dims, *new_window])
            pool_window = int64_array([1, 1, *new_window])

        # 3. Reduce => Reshape->Pooling->Reshape
        reshape_op = Reshape(graph, {'name': node.id + '/Reshape'})
        reshape_dim_const_data = Const(graph, {
            'name': node.id + '/Reshape/Dim',
            'value': reshape_shape
        }).create_node_with_data()

        final_reshape_op = Reshape(graph, {'name': node.id + '/FinalReshape'})
        final_reshape_dim_const_data = Const(graph, {
            'name': node.id + '/FinalReshape/Dim',
            'value': output_shape
        }).create_node_with_data()
        pooling_op = Pooling(
            graph,
            dict(name=node.id + '/Pool',
                 window=pool_window,
                 output_spatial_shape=None,
                 batch_dims=int64_array([0]),
                 channel_dims=int64_array([1]),
                 exclude_pad='false',
                 pool_method=self.pool_method_map[reduce_type]))

        graph.remove_edge(input_data.id, node.id)
        graph.remove_edge(node.id, output_data.id)

        if np.array_equal(input_shape, reshape_shape):
            input_to_pooling = input_data
        else:
            input_to_pooling = reshape_op.create_node_with_data(
                inputs=[input_data, reshape_dim_const_data])
        pooling = pooling_op.create_node_with_data(inputs=[input_to_pooling])
        final_reshape_op.create_node_with_data(
            inputs=[pooling, final_reshape_dim_const_data],
            data_nodes=output_data)

        # convert batch dimension to 0 to produce reshape-able IR over the batch dimension
        if 0 not in axes:
            reshape_dim_const_data.in_node(0).value[0] = 0
            final_reshape_dim_const_data.in_node(0).value[0] = 0

        # 4. If it is a reduction with summation, multiply by the size of the reduction slice (an AttributedPower node with 'scale')
        if reduce_type == 'ReduceSum':
            output_data.in_node().insert_node_with_data_after(
                output_data, AttributedPower, {
                    'name': node.name + '/Mul',
                    'scale': float(reduction_dim)
                })
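
Since sum = mean * N over the reduction slice, the appended AttributedPower node with scale=reduction_dim turns the average-pooling result back into a sum (this assumes pool_method_map sends ReduceSum to average pooling, which the snippet does not show). A small numeric check:

import numpy as np

# Each row stands for one reduction slice of size reduction_dim.
x = np.arange(6, dtype=np.float32).reshape(2, 3)
reduction_dim = x.shape[1]

avg = x.mean(axis=1)                 # what the avg-Pooling produces
summed = avg * reduction_dim         # what the scale=reduction_dim node restores
print(np.allclose(summed, x.sum(axis=1)))  # True
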