Example #1
    def replace_pattern(self, graph: Graph, match: dict):
        # Replace the matched op with an explicit Reshape (to 6D) -> Transpose ->
        # Reshape (to 4D) sub-graph that rearranges depth blocks into spatial blocks.
        node = match['op']

        N, H, W, C = match['in_data'].shape
        block_size = node['block_size']

        graph.remove_edge(match['in_data'].id, node.id)
        graph.remove_edge(node.id, match['out_data'].id)

        dim_6D = int64_array([0, block_size, block_size, int(C / (block_size ** 2)), H, W])
        order_6D = int64_array([0, 3, 4, 1, 5, 2])
        dim_4D = int64_array([0, int(H * block_size), int(W * block_size), int(C / (block_size ** 2))])

        reshape_6_op = Reshape(graph, dict(name=node.id + '/Reshape_to_6D'))
        reshape_6_const_data = Const(graph, dict(value=dim_6D)).create_node_with_data()
        reshape_6_data_node = reshape_6_op.create_node_with_data([match['in_data'], reshape_6_const_data])
        mark_as_correct_data_layout(reshape_6_data_node.in_node(0))

        order_const_data = Const(graph, dict(value=order_6D)).create_node_with_data()

        transpose_op = Transpose(graph, dict(name=node.id + '/Transpose'))
        transpose_data_node = transpose_op.create_node_with_data([reshape_6_data_node, order_const_data])
        mark_as_correct_data_layout(transpose_data_node.in_node(0))

        reshape_4_op = Reshape(graph, dict(name=node.id + '/Reshape_to_4D'))
        reshape_4_const_data = Const(graph, dict(value=dim_4D)).create_node_with_data()
        reshape_4_data_node = reshape_4_op.create_node_with_data([transpose_data_node, reshape_4_const_data],
                                                                 data_nodes=[match['out_data']])
        mark_input_as_in_correct_layout(reshape_4_data_node.in_node(0), 0)
        mark_output_as_in_correct_layout(reshape_4_data_node.in_node(0), 0)
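
For reference, the reshape -> transpose -> reshape trick used above can be sketched in plain NumPy. The snippet below is a minimal illustration of DepthToSpace in NHWC (TensorFlow's DCR variant); the dims and permutation order are the textbook NHWC ones and do not claim to match the exact 6D dims built by the transformation above.

import numpy as np

def depth_to_space_nhwc(x, bs):
    n, h, w, c = x.shape
    y = x.reshape(n, h, w, bs, bs, c // bs ** 2)  # split C into (bs, bs, C/bs^2)
    y = y.transpose(0, 1, 3, 2, 4, 5)             # interleave the blocks with H and W
    return y.reshape(n, h * bs, w * bs, c // bs ** 2)

x = np.arange(4).reshape((1, 1, 1, 4))
print(depth_to_space_nhwc(x, 2)[0, :, :, 0])  # [[0 1], [2 3]]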
Example #2
def mark_squeeze_reshape_concat_before_detection_output(start_nodes: list):
    """
    The function looks for Reshape, Concat and Squeeze ops after the 'start_nodes' with 4D output and marks them with
    proper attributes to infer them in original NHWC layout. This is a case of the TensorFlow Object Detection API
    models for the SSD heads output which produces 4D tensor with bounding box deltas.
    :param start_nodes: list of nodes to start search from.
    :return: None
    """
    q = collections.deque()
    q.extend(start_nodes)
    while len(q) != 0:
        cur_node = q.popleft()
        if cur_node.has_valid('type'):
            if cur_node.soft_get('type') == 'DetectionOutput':  # do not go beyond the DetectionOutput node
                continue
            # the input to Reshape comes from Convolution so it will be converted from NCHW to NHWC layout in the
            # InsertLayoutPropagationTransposes transformation. But the output should be kept in the original layout
            if cur_node.soft_get('type') == 'Reshape' and len(cur_node.out_port(0).data.get_shape()) == 4:
                mark_output_as_in_correct_layout(cur_node, 0)

            # Concat should be inferred in the original layout so the input with concatenation axis should not be
            # updated from NHWC to NCHW layout
            if cur_node.soft_get('type') == 'Concat' and len(cur_node.out_port(0).data.get_shape()) == 4:
                cur_node.in_port(1).__setattr__('input_permutation', None)
                cur_node['nchw_layout'] = True
                cur_node.out_node(0)['nchw_layout'] = True

            # Squeeze should be inferred in the original layout so the input with squeeze axis should not be updated
            # from NHWC to NCHW layout. The input is marked as in correct layout to prevent from inserting Transpose
            # from NHWC to NCHW.
            if cur_node.soft_get('type') == 'Squeeze' and len(cur_node.in_port(0).data.get_shape()) == 4:
                cur_node.in_port(1).__setattr__('input_permutation', None)
                mark_input_as_in_correct_layout(cur_node, 0)

        for port in cur_node.out_port(0).get_destinations():
            q.append(port.node)
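
To see why the Concat axis must not be remapped: an axis value that addresses channels in NHWC points at a different dimension in NCHW. A minimal illustration in plain NumPy (unrelated to the Model Optimizer API):

import numpy as np

a = np.zeros((1, 2, 2, 3))  # NHWC
b = np.ones((1, 2, 2, 5))   # NHWC
print(np.concatenate([a, b], axis=3).shape)  # (1, 2, 2, 8) -- channel axis is 3 in NHWC
print(np.concatenate([a.transpose(0, 3, 1, 2),
                      b.transpose(0, 3, 1, 2)], axis=1).shape)  # (1, 8, 2, 2) -- axis 1 in NCHW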
Example #3
    def mark_node_as_in_correct_layout_by_in_port(self, in_port):
        # Disable the input permutation for the port, mark the input as already being in
        # the correct layout, and propagate the mark to the sources of the next input ports.
        next_in_ports = self.get_next_in_ports(in_port)
        in_port.__setattr__('input_permutation', None)
        mark_input_as_in_correct_layout(in_port.node, in_port.idx)
        for port in next_in_ports:
            mark_output_as_in_correct_layout(port.get_source().node, port.get_source().idx)
Example #4
def add_convolution_to_swap_xy_coordinates(graph: Graph, input_node: Node,
                                           coordinates_size: int):
    """
    The function add convolution node after the node 'input_node' to swap xy coordinates of the boxes produced
    by the node 'input_node'. It is expected that box coordinates are located in the fastest changing dimension of the
    'input_node' output, i.e. the input tensor could be reshaped to [num_boxes, 4] or [num_boxes, 5]. If the size is 5,
    then the 0-th element for each of num_boxes blocks is not changed and element 1 is swapped with element 2, element 3
    is swapped with element 4. This is the case when boxes coordinates are produced by the layer "Proposal". The exact
    amount of elements in each block is equal to the 'coordinates_size' parameter.
    :param graph: graph to operate on.
    :param input_node: node producing boxes coordinates.
    :param coordinates_size: integer value equal to 4 or 5.
    :return convolution node that swaps coordinates.
    """
    # only input tensors with 4 or 5 numbers describing each box are supported
    assert coordinates_size in [4, 5]

    input_reshape_4d_node = create_op_node_with_second_input(
        graph, Reshape, int64_array([-1, 1, 1, coordinates_size]),
        dict(name=input_node.name + '/reshape_4d'), input_node)
    mark_input_as_in_correct_layout(input_reshape_4d_node, 0)
    # do not mark second input because the reshape works in initial model layout and needs to be transformed to NCHW
    mark_output_as_in_correct_layout(input_reshape_4d_node, 0)

    if coordinates_size == 5:
        # zero indexed element is not box coordinate ("batch id" in case of Proposal)
        conv_filter_data = np.array([[[[1, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 1, 0, 0, 0],
                                       [0, 0, 0, 0, 1], [0, 0, 0, 1, 0]]]], dtype=np.float32)
    else:
        conv_filter_data = np.array([[[[0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]]]],
                                    dtype=np.float32)

    conv_filter_data = np.transpose(conv_filter_data, [2, 3, 0, 1])

    conv_filter_const_op = Const(graph, dict(value=conv_filter_data))
    conv_filter_const_node = conv_filter_const_op.create_node(
        [], dict(name=input_node.name + '/weights'))

    conv_op = Convolution(
        graph, {
            'bias_addable': True,
            'channel_dims': np.array([3]),
            'batch_dims': np.array([0]),
            'input_feature_channel': 0,
            'output_feature_channel': 1,
            'group': 1,
            'layout': 'NHWC',
        })
    return conv_op.create_node([input_reshape_4d_node, conv_filter_const_node],
                               dict(name=input_node.name + "/conv"))
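
As a sanity check (my own, not part of the transformation): the 1x1 convolution weights form a permutation matrix over the coordinate channel, so applying it to one [batch_id, x1, y1, x2, y2] block swaps the x/y pairs while keeping the 0-th element fixed. In plain NumPy:

import numpy as np

perm = np.array([[1, 0, 0, 0, 0],
                 [0, 0, 1, 0, 0],
                 [0, 1, 0, 0, 0],
                 [0, 0, 0, 0, 1],
                 [0, 0, 0, 1, 0]], dtype=np.float32)
box = np.array([7, 10, 20, 30, 40], dtype=np.float32)  # [batch_id, x1, y1, x2, y2]
print(box @ perm)  # [ 7. 20. 10. 40. 30.] -> [batch_id, y1, x1, y2, x2]

The matrix is symmetric (a product of two transpositions), so the result is the same regardless of which weight axis is treated as the input channel.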
Example #5
    def find_and_replace_pattern(self, graph: Graph):
        shape_ops = graph.get_op_nodes(op='ShapeOf')

        # 1. Inserting Gather to N*C format on constant shape paths
        for shape in shape_ops:
            source_port = shape.in_port(0).get_source()
            if is_output_data_in_correct_layout(source_port.node, source_port.idx):
                continue  # data is already in N*C format

            name = shape.soft_get('name', shape.id)
            rank = source_port.data.get_shape().size

            if rank in [4, 5]:
                index = int64_array([0, *list(range(2, rank)), 1])
            else:
                continue  # data is layout independent

            gather = create_op_with_const_inputs(graph, op=Gather, port_value_dict={1: index, 2: int64_array(0)},
                                                 op_attrs={'name': name + '/GatherNCHWtoNHWC'})
            shape.out_port(0).get_connection().insert_node(gather)

        # 2. Inserting Gather/Transpose to NC* format
        shape_sub_graph_end_points = self.find_shape_subgraph_endpoints([shape.out_port(0) for shape in shape_ops])
        for in_port in shape_sub_graph_end_points:
            name = in_port.node.soft_get('name', in_port.node.id)
            shape = in_port.data.get_shape()

            should_switch_layout = not any([is_output_data_in_correct_layout(port.node, port.idx)
                                            for port in in_port.node.out_ports().values() if not port.disconnected()])
            should_insert_gather = should_switch_layout and len(shape) == 1 and shape.item(0) in [4, 5]
            should_insert_transpose = should_switch_layout and len(shape) in [4, 5]

            if should_insert_gather:
                # turn the input permutation off; the Gather inserted below performs it instead
                in_port.__setattr__('input_permutation', None)
                index = int64_array([0, shape.item(0) - 1, *list(range(1, shape.item(0) - 1))])
                gather = create_op_with_const_inputs(graph, op=Gather,
                                                     port_value_dict={1: index, 2: int64_array(0)},
                                                     op_attrs={'name': name + '/GatherNHWCtoNCHW'})
                in_port.get_connection().insert_node(gather)
            elif should_insert_transpose:
                # turn the input permutation off; the Transpose inserted below performs it instead
                in_port.__setattr__('input_permutation', None)
                order = int64_array([0, len(shape) - 1, *list(range(1, len(shape) - 1))])
                transpose = create_op_with_const_inputs(graph, op=Transpose, port_value_dict={1: order},
                                                        op_attrs={'name': name + '/TransposeNHWCtoNCHW',
                                                                  'override_output_shape': True})
                mark_input_as_in_correct_layout(transpose, 0)
                mark_output_as_in_correct_layout(transpose, 0)
                in_port.get_connection().insert_node(transpose)
            else:
                continue  # data is layout independent

    def find_shape_subgraph_endpoints(self, out_ports: List[Port], visited: set = None) -> Set[Port]:
        """
        Searches for input ports of data dependent operations starting from output ports passed to the function.
        Condition for data dependent operations is absence of node output value.

        Side action: marking the sub-graph as it is in the correct layout

        :param out_ports: list of output ports to start search from
        :param visited: set of input ports that were visited to avoid visiting them more than once
        :return: set of input ports of data dependent operations
        """
        if visited is None:
            visited = set()

        deque_of_in_ports = deque()
        for out_port in out_ports:
            deque_of_in_ports.extend(out_port.get_destinations())

        end_points_in_ports = set()
        while len(deque_of_in_ports):
            in_port = deque_of_in_ports.popleft()
            if in_port in visited:
                continue
            next_in_ports = self.get_next_in_ports(in_port)
            if any([port.data.get_value() is None for port in next_in_ports]):
                end_points_in_ports.add(in_port)
            else:
                in_port.__setattr__('input_permutation', None)
                mark_input_as_in_correct_layout(in_port.node, in_port.idx)
                for port in next_in_ports:
                    mark_output_as_in_correct_layout(port.get_source().node,
                                                     port.get_source().idx)
                deque_of_in_ports.extend(next_in_ports)
            visited.add(in_port)
        return end_points_in_ports

    def find_and_replace_pattern(self, graph: Graph):
        visited = set()
        marked_nodes = set()
        condition_forward = lambda n: not InsertLayoutPropagationTranspose.is_nhwc_to_nchw_transpose_needed(n)
        condition_backward = lambda n: not InsertLayoutPropagationTranspose.is_nchw_to_nhwc_transpose_needed(n)
        for node_condition in self.op_conditions:
            for node in graph.get_op_nodes():
                if node_condition(node):
                    log.debug('Detected node "{}" as a node which should be executed in the original layout'
                              ''.format(node.soft_get('name', node.id)))
                    forward_visited_nodes = self.bfs([node], visited, condition_forward, True)
                    backward_visited_nodes = self.bfs([node], visited, condition_backward, False)

                    # find "reinterp_shape" like ops which change rank of input to 4D or 5D from smaller dimensions
                    for back_node in backward_visited_nodes:
                        for input_node in self.get_input_nodes(back_node):
                            if input_node not in backward_visited_nodes and not condition_forward(input_node):
                                marked_nodes.add(input_node)

                    # find "reinterp_shape" like ops which change rank of input from 4D or 5D to smaller dimensions
                    for forward_node in forward_visited_nodes:
                        for output_node in self.get_output_nodes(forward_node):
                            if output_node not in forward_visited_nodes and not condition_backward(output_node):
                                marked_nodes.add(output_node)

                    marked_nodes.update(forward_visited_nodes + backward_visited_nodes)

        if marked_nodes:
            log.debug('The following nodes will be executed in the original layout: {}'
                      ''.format([n.soft_get('name', n.id) for n in marked_nodes]))

            # mark all matched nodes as in correct layout and disable attributes permutation for them
            for visited_node in marked_nodes:
                mark_as_correct_data_layout(visited_node)
                visited_node['nchw_layout'] = True

        _, nodes_weights, nodes_in_weights = self.get_ports_and_nodes_on_weights(graph)
        for node in nodes_weights:
            if node in nodes_in_weights:
                for ind, port in node.in_ports().items():
                    if ind not in nodes_in_weights[node]:
                        mark_input_as_in_correct_layout(node, ind)
                for ind, port in node.out_ports().items():
                    mark_output_as_in_correct_layout(node, ind)
            else:
                mark_as_correct_data_layout(node)
            node['nchw_layout'] = True
            if node.soft_get('type') == 'Const':  # WA for Const op deletion during clean_up
                node.out_node()['nchw_layout'] = True

        for node in self.get_ports_and_nodes_on_shape_subgraphs(graph)[1]:
            mark_as_correct_data_layout(node)
            node['nchw_layout'] = True
            if node.soft_get('type') == 'Const':  # WA for Const op deletion during clean_up
                node.out_node()['nchw_layout'] = True
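
The Gather indices and the Transpose order in the first method above all follow the same formula, shown here for the rank-4 case in plain NumPy (the shape value is illustrative only):

import numpy as np

rank = 4
to_nhwc = np.array([0, *range(2, rank), 1])             # [0, 2, 3, 1], as in step 1
to_nchw = np.array([0, rank - 1, *range(1, rank - 1)])  # [0, 3, 1, 2], as in step 2
shape_nchw = np.array([1, 3, 224, 224])
print(shape_nchw[to_nhwc])           # [  1 224 224   3]
print(shape_nchw[to_nhwc][to_nchw])  # [  1   3 224 224] -- round trip back to NCHW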