Code Example #1
    def replace_pattern(self, graph: Graph, match: dict):
        # Crop the constant anchor tensor to the shape produced by slice_like,
        # then fold the slice_like sub-graph into a pre-computed Const node.
        slice_like = match['slice_like']
        anchor_node = slice_like.in_port(0).get_source().node
        reshape = slice_like.out_nodes()[0].out_node()
        slice_shape = slice_like.out_nodes()[0].shape

        anchor_node.value = np.copy(
            anchor_node.value[:slice_shape[0], :slice_shape[1], :slice_shape[2],
                              :slice_shape[3], :slice_shape[4]])
        anchor_node.shape = slice_shape

        # anchor_node.value has already been cropped above, so it can be reused directly
        val_node = Const(graph, {
            'name': slice_like.name + '/cropped_',
            'value': anchor_node.value,
            'shape': slice_shape
        }).create_node_with_data()

        # Detach slice_like and feed the downstream Reshape from the new Const
        slice_like.in_port(0).disconnect()
        slice_like.in_port(1).disconnect()
        slice_like.out_port(0).disconnect()
        reshape.in_port(0).connect(val_node.in_node().out_port(0))
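
For reference, here is a minimal NumPy sketch of the crop this pass pre-computes: slice_like trims the anchor tensor to another tensor's shape, so the result can be baked into a Const up front (shapes below are illustrative, not taken from a real model):

    import numpy as np

    anchors = np.random.rand(1, 2, 3, 4, 5)      # hypothetical 5-D anchor tensor
    slice_shape = np.array([1, 2, 2, 2, 2])      # shape taken from slice_like's output
    cropped = np.copy(anchors[:slice_shape[0], :slice_shape[1], :slice_shape[2],
                              :slice_shape[3], :slice_shape[4]])
    assert tuple(cropped.shape) == tuple(slice_shape)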
Code Example #2
    def replace_pattern(self, graph: Graph, match: dict):
        concat_node = match['concat']
        if len(concat_node.out_nodes()[0].out_nodes()) == 0:
            return

        # Walk back from each Concat input to its Const producer and collect the values
        const_values = []
        for in_node_index in concat_node.in_nodes():
            current_node = concat_node.in_port(in_node_index).get_source().node
            for k, v in reversed(self.pattern()['nodes'][:-1]):
                if 'op' in v:
                    assert current_node.op == v['op']
                    current_node = current_node.in_port(0).get_source().node
                    if current_node.op == 'Const':
                        crop_value = np.reshape(current_node.value, (1, -1))
                        const_values.append(crop_value)
                        break

        concat_value = np.concatenate(tuple(const_values), axis=1)
        concat_value = np.reshape(concat_value, (1, 2, -1))

        # Convert each box in place from center format (cx, cy, w, h)
        # to corner format (xmin, ymin, xmax, ymax)
        slice_value = concat_value[0][0]
        for i in range(int(slice_value.size / 4)):
            index = i * 4
            xmin = slice_value[index] - (slice_value[index + 2] / 2)
            ymin = slice_value[index + 1] - (slice_value[index + 3] / 2)
            xmax = slice_value[index] + (slice_value[index + 2] / 2)
            ymax = slice_value[index + 1] + (slice_value[index + 3] / 2)
            slice_value[index] = xmin
            slice_value[index + 1] = ymin
            slice_value[index + 2] = xmax
            slice_value[index + 3] = ymax

        # Replace the Concat output with a pre-computed Const on the consumer's port 2
        val_node = Const(graph, {
            'name': concat_node.name + '/const_',
            'value': concat_value
        }).create_node_with_data()
        out_node = concat_node.out_port(0).get_destination().node
        concat_node.out_port(0).disconnect()
        out_node.in_port(2).connect(val_node.in_node().out_port(0))
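
The per-box loop above converts center-format boxes (cx, cy, w, h) into corner format (xmin, ymin, xmax, ymax). For comparison, a vectorized NumPy sketch of the same conversion (center_to_corner is a hypothetical helper, not part of the pass above):

    import numpy as np

    def center_to_corner(boxes: np.ndarray) -> np.ndarray:
        # boxes: flat array of (cx, cy, w, h) groups; length must be divisible by 4
        b = boxes.reshape(-1, 4).astype(np.float64)
        half = b[:, 2:] / 2                          # (w/2, h/2) per box
        return np.hstack([b[:, :2] - half, b[:, :2] + half]).ravel()

    # (cx, cy, w, h) = (10, 20, 4, 6) -> (xmin, ymin, xmax, ymax) = (8, 17, 12, 23)
    assert np.allclose(center_to_corner(np.array([10., 20., 4., 6.])), [8., 17., 12., 23.])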
Code Example #3
    def replace_pattern(self, graph: Graph, match: dict):
        node = match['reduce']

        if node.out_port(0).data.get_value() is not None:
            # We leave Reduce* operations located in constant sub-graph as is
            # to keep model reshapable with --keep_shape_ops cli key
            return

        reduce_type = node.type
        if reduce_type not in self.pool_method_map:
            log.error("Reduce type {} is not included in pool_method_map. "
                      "Please update pool_method_map with the new key {}".format(reduce_type, reduce_type))
            return

        input_data = node.in_node()
        output_data = node.out_node()

        input_shape = node.in_port(0).data.get_shape()
        output_shape = node.out_port(0).data.get_shape()

        # Normalize node axes to exclude negative indices
        axes_data_value = node.in_port(1).data.get_value()
        axes = int64_array([axes_data_value.item()]) if axes_data_value.size == 1 else axes_data_value
        axes = sorted(get_canonical_axis_index(input_shape, a) for a in axes)

        # Check that the values in the axes list are consecutive
        for idx in range(1, len(axes)):
            if axes[idx] != axes[idx - 1] + 1:
                log.error("Reduce with non-consecutive axes {} is not supported".format(axes))
                return
        # So now we are sure that we can convert Reduce to appropriate operation

        # 1. Calculate the shapes that will be used for the reduction
        reduction_dim = np.prod([input_shape[idx] for idx in axes])
        begin_dims = np.array([input_shape[idx] for idx in range(axes[0])])
        end_dim = np.prod([input_shape[idx] for idx in range(axes[-1] + 1, len(input_shape))])

        # 2. Create reshape with the appropriate shape
        if len(begin_dims) > 2:
            if 0 not in axes:
                begin_dims = int64_array([begin_dims[0], np.prod(begin_dims[1:])])
            else:
                begin_dims = int64_array([np.prod(begin_dims[0:-1]), begin_dims[-1]])
        else:
            # Pad begin_dims with ones up to length 2
            begin_dims = int64_array(np.append(begin_dims, [1] * (2 - len(begin_dims))))

        reshape_shape = int64_array([*begin_dims, reduction_dim, end_dim])
        pool_window = int64_array([1, 1, reduction_dim, 1])

        # With no trailing dims, spread reduction_dim over the two pooling window dims
        if end_dim == 1:
            new_window = ReduceReplacer.initial_reshape_dim_normalizer(reduction_dim)
            reshape_shape = int64_array([*begin_dims, *new_window])
            pool_window = int64_array([1, 1, *new_window])

        # 3. Reduce => Reshape->Pooling->Reshape
        reshape_op = Reshape(graph, {'name': node.id + '/Reshape'})
        reshape_dim_const_data = Const(graph, {
            'name': node.id + '/Reshape/Dim',
            'value': reshape_shape
        }).create_node_with_data()

        final_reshape_op = Reshape(graph, {'name': node.id + '/FinalReshape'})
        final_reshape_dim_const_data = Const(graph, {
            'name': node.id + '/FinalReshape/Dim',
            'value': output_shape
        }).create_node_with_data()
        pooling_op = Pooling(graph, dict(
            name=node.id + '/Pool',
            window=pool_window,
            output_spatial_shape=None,
            batch_dims=int64_array([0]),
            channel_dims=int64_array([1]),
            exclude_pad='false',
            pool_method=self.pool_method_map[reduce_type]))

        graph.remove_edge(input_data.id, node.id)
        graph.remove_edge(node.id, output_data.id)

        if np.array_equal(input_shape, reshape_shape):
            input_to_pooling = input_data
        else:
            input_to_pooling = reshape_op.create_node_with_data(
                inputs=[input_data, reshape_dim_const_data])
        pooling = pooling_op.create_node_with_data(inputs=[input_to_pooling])
        final_reshape_op.create_node_with_data(
            inputs=[pooling, final_reshape_dim_const_data],
            data_nodes=output_data)

        # Set the batch dimension to 0 to produce an IR that is reshape-able over the batch dimension
        if 0 not in axes:
            reshape_dim_const_data.in_node(0).value[0] = 0
            final_reshape_dim_const_data.in_node(0).value[0] = 0

        # 4. The pooling computes a mean, so for ReduceSum multiply the result
        #    by the size of the reduction slice to recover the sum
        if reduce_type == 'ReduceSum':
            output_data.in_node().insert_node_with_data_after(
                output_data, AttributedPower, {
                    'name': node.name + '/Mul',
                    'scale': float(reduction_dim)
                })
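
As a quick sanity check on the Reshape->Pooling->Reshape decomposition, a NumPy sketch under illustrative shapes: averaging over the reduction_dim axis of the reshaped tensor reproduces ReduceMean over consecutive axes.

    import numpy as np

    x = np.random.rand(2, 3, 4, 5)                 # reduce over consecutive axes (2, 3)
    reduction_dim = 4 * 5                          # product of the reduced dims
    reshaped = x.reshape(2, 3, reduction_dim, 1)   # [*begin_dims, reduction_dim, end_dim]
    pooled = reshaped.mean(axis=2)                 # models the 1x1x{reduction_dim}x1 avg-pool window
    assert np.allclose(pooled.reshape(2, 3), x.mean(axis=(2, 3)))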