    def test_transpose_insert_with_two_result_nodes(self, nhwc_to_nchw_order, nchw_to_nhwc_order,
                                                    add_permutation_attrs, fft_kind):
        shape_len = len(nhwc_to_nchw_order) if add_permutation_attrs else 3
        shape = np.array(range(shape_len))
        add_shape = shape if nhwc_to_nchw_order is None else shape[nhwc_to_nchw_order]
        graph = build_graph(nodes_attrs=nodes_for_case_with_two_results,
                            edges=edges_for_case_with_two_results,
                            update_attributes={
                                'placeholder1_data': {'shape': int64_array(shape)},
                                'placeholder1': {'shape': int64_array(shape), 'rt_info': RTInfo()},
                                'transpose_parameter_order': {
                                    'value': np.array(nhwc_to_nchw_order),
                                    'shape': int64_array(np.array(nhwc_to_nchw_order).shape)
                                },
                                'transpose_parameter_order_data': {
                                    'value': np.array(nhwc_to_nchw_order),
                                    'shape': int64_array(np.array(nhwc_to_nchw_order).shape)
                                },
                                'fft': {'op': fft_kind, 'type': fft_kind},
                                'add_data': {'shape': add_shape},
                                'fft_data': {'shape': add_shape},
                                'result1': {'shape': shape, 'rt_info': RTInfo()},
                                'result2': {'shape': shape, 'rt_info': RTInfo()},
                            })

        if add_permutation_attrs:
            graph_ref = build_graph(nodes_for_case_with_two_results, edges_with_transpose_for_case_with_two_results)
        else:
            graph_ref = build_graph(nodes_for_case_with_two_results, edges_for_case_with_two_results)

        param1_node = Node(graph, 'placeholder1')
        result1_node = Node(graph, 'result1')
        result2_node = Node(graph, 'result2')

        if add_permutation_attrs:
            shape_len = len(nhwc_to_nchw_order)
            param1_node['permute_attrs'] = PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
            param1_node.out_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)
            result1_node.in_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)
            result2_node.in_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)

        PreserveRuntimeInfo().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result1')
        self.assertTrue(flag, resp)

        self.assertFalse(param1_node.has_valid('permute_attrs'))
        self.assertFalse(param1_node.out_node(0).has_valid('permutation'))

        if add_permutation_attrs:
            rt_info = param1_node.rt_info.info
            old_api_map = rt_info[('old_api_map_order', 0)].info
            self.assertTrue(np.array_equal(old_api_map['inverse_order'], nchw_to_nhwc_order))
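
# Illustrative sketch (not part of the original snippet): the NHWC->NCHW permutation produced by
# PermuteAttrs().get_nhwc_to_nchw_permutation(rank) moves the channel axis right after the batch axis,
# and its inverse is what the test expects in old_api_map['inverse_order']. The helper below is a
# hypothetical pure-numpy restatement for ranks >= 4 (rank 3 appears to be special-cased as identity,
# which is why the test falls back to shape_len = 3 when no permutation attrs are added).
import numpy as np

def nhwc_to_nchw_perm(rank):
    # e.g. rank 4 -> [0, 3, 1, 2], rank 5 -> [0, 4, 1, 2, 3]
    return np.array([0, rank - 1] + list(range(1, rank - 1)), dtype=np.int64)

assert list(nhwc_to_nchw_perm(4)) == [0, 3, 1, 2]
assert list(np.argsort(nhwc_to_nchw_perm(4))) == [0, 2, 3, 1]  # inverse permutation (NCHW -> NHWC)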
Example No. 2
    def extract(cls, node):
        shape = shape_array([])
        # Extract output shape from `shape` attribute
        extracted_shape = tf_tensor_shape(node.pb.attr["shape"].shape)
        if len(extracted_shape) != 0:
            shape = extracted_shape
        else:
            # Extract output shape from `_output_shapes` attribute if it is possible
            extracted_output_shapes = node.pb.attr["_output_shapes"].list.shape
            if len(extracted_output_shapes) == 1:   # check if attribute not empty
                extracted_output_shapes = tf_tensor_shape(extracted_output_shapes[0])

                # Check equality of the extracted shapes. We know of cases when the Placeholder operation has an empty
                # `shape` attribute value and a non-empty `_output_shapes` attribute value, and we need to handle them.
                if len(extracted_output_shapes) > len(extracted_shape):
                    log.warning('Extracted shapes for Placeholder operation {} have different lengths: `shape` {} and '
                                '`_output_shapes` {}. Please check that the model is consistent.'.format(
                                    node.pb.name, extracted_shape, extracted_output_shapes))
                    if len(extracted_output_shapes) != 0:
                        shape = extracted_output_shapes

        attrs = {
            'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
            'shape': shape,
            'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        }
        if node.pb.attr["shape"].shape.unknown_rank:
            attrs['shape'] = None
        Parameter.update_node_stat(node, attrs)
        return cls.enabled
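
# Illustrative sketch (not part of the original snippet): the precedence implemented above is
# "use the `shape` attribute when it is non-empty, otherwise fall back to a single non-empty
# `_output_shapes` entry". choose_placeholder_shape() is a hypothetical, framework-free restatement
# of that decision (it omits the warning and the unknown_rank handling).
def choose_placeholder_shape(shape_attr, output_shapes_attr):
    if len(shape_attr) != 0:
        return shape_attr
    if len(output_shapes_attr) == 1 and len(output_shapes_attr[0]) > len(shape_attr):
        return output_shapes_attr[0]
    return shape_attr  # keep the empty shape when no usable fallback exists

assert choose_placeholder_shape([1, 224, 224, 3], []) == [1, 224, 224, 3]
assert choose_placeholder_shape([], [[1, 224, 224, 3]]) == [1, 224, 224, 3]
assert choose_placeholder_shape([], []) == []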
    def find_and_replace_pattern(self, graph: Graph):
        for node in graph.get_data_nodes():
            if node.has_and_set('nchw_layout'):
                continue

            # Get NHWC to NCHW permutation for N dims, where N = len(node.shape)
            permutation = PermuteAttrs().get_nhwc_to_nchw_permutation(len(node.shape))

            # Check whether the data node already has a permutation on any of its in/out edges
            skip_permutation = False
            for in_node in node.in_nodes():
                edge_attrs = node.graph.get_edge_data(in_node.id, node.id)[0]
                if 'permutation' in edge_attrs:
                    skip_permutation = True
            for out_node in node.out_nodes():
                edge_attrs = node.graph.get_edge_data(node.id, out_node.id)[0]
                if 'permutation' in edge_attrs:
                    skip_permutation = True

            if skip_permutation:
                continue

            # Set permutation to all in/out edges
            for in_node in node.in_nodes():
                PermuteAttrs.set_permutation(in_node, node, permutation)

            for out_node in node.out_nodes():
                PermuteAttrs.set_permutation(node, out_node, permutation)
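
# Illustrative sketch (not part of the original snippet): a permutation attached to an edge is,
# roughly speaking, later applied to the shapes and values flowing over it. A hypothetical numpy
# illustration of the rank-4 reordering that the permutation above describes:
import numpy as np

nhwc_shape = np.array([1, 224, 224, 3])  # N, H, W, C
perm = np.array([0, 3, 1, 2])            # NHWC -> NCHW permutation for rank 4
assert list(nhwc_shape[perm]) == [1, 3, 224, 224]  # N, C, H, W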
    def test_transpose_insert(self, nhwc_to_nchw_order, nchw_to_nhwc_order, add_permutation_attrs):
        graph_nodes = {
            **valued_const_with_data('transpose_parameter_order', np.array(nhwc_to_nchw_order)),
            **valued_const_with_data('transpose_result_order', np.array(nchw_to_nhwc_order))
        }
        graph_nodes.update(nodes)
        shape_len = len(nhwc_to_nchw_order) if add_permutation_attrs else 3
        shape = np.array(range(shape_len))
        add_shape = shape if nhwc_to_nchw_order is None else shape[nhwc_to_nchw_order]
        graph_nodes.update(
            {
                **regular_op_with_shaped_data('placeholder1', shape,
                                              {'type': 'Parameter', 'rt_info': RTInfo(), 'shape': shape}),
                **regular_op_with_shaped_data('result', shape, {'type': 'Result', 'rt_info': RTInfo(), 'shape': shape}),
                **regular_op_with_shaped_data('add', add_shape,
                                              {'type': 'Add', 'op': 'Add', 'infer': copy_shape_infer}),
            }
        )

        graph = build_graph(graph_nodes, edges)
        graph_ref = build_graph(graph_nodes, edges_with_transpose if add_permutation_attrs else edges)

        param_node = Node(graph, 'placeholder1')
        result_node = Node(graph, 'result')

        if add_permutation_attrs:
            shape_len = len(nhwc_to_nchw_order)
            param_node['permute_attrs'] = PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
            param_node.out_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)
            result_node.in_node(0)['permutation'] = PermuteAttrs().get_nhwc_to_nchw_permutation(shape_len)

        PreserveRuntimeInfo().find_and_replace_pattern(graph)

        (flag, resp) = compare_graphs(graph, graph_ref, 'result')
        self.assertTrue(flag, resp)

        self.assertFalse(param_node.has_valid('permute_attrs'))
        self.assertFalse(param_node.out_node(0).has_valid('permutation'))

        if add_permutation_attrs:
            rt_info = param_node.rt_info.info
            old_api_map = rt_info[('old_api_map_order', 0)].info
            self.assertTrue(np.array_equal(old_api_map['inverse_order'], nchw_to_nhwc_order))

            rt_info = result_node.rt_info.info
            old_api_map = rt_info[('old_api_map_order', 0)].info
            self.assertTrue(np.array_equal(old_api_map['order'], nhwc_to_nchw_order))
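
# Illustrative sketch (not part of the original snippet): the two orders checked above are mutual
# inverses, so np.argsort of one recovers the other. The concrete values below are an assumption
# about the usual rank-4 parameterization of this test, not taken from the snippet itself.
import numpy as np

nhwc_to_nchw = np.array([0, 3, 1, 2])
nchw_to_nhwc = np.array([0, 2, 3, 1])
assert np.array_equal(np.argsort(nhwc_to_nchw), nchw_to_nhwc)
assert np.array_equal(np.argsort(nchw_to_nhwc), nhwc_to_nchw)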
Example No. 5
    def extract(cls, node):
        attrs = {
            'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
            'shape': tf_tensor_shape(node.pb.attr["shape"].shape),
            'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        }
        if node.pb.attr["shape"].shape.unknown_rank:
            attrs['shape'] = None
        Parameter.update_node_stat(node, attrs)
        return cls.enabled
Example No. 6
def convert_graph_inputs_to_parameters(internal_graph, internal_graph_proto):
    # create Parameter nodes for the body graph
    body_parameters = []
    body_parameter_names = []
    for idx, pb_node in enumerate(internal_graph_proto['input_arg']):
        param_id = internal_graph.unique_id(pb_node.name)
        internal_graph.add_node(param_id,
                                name=param_id,
                                kind='op',
                                op='Parameter',
                                pb=None,
                                shape=None)
        parameter_node = Node(internal_graph, pb_node.name)
        Parameter.update_node_stat(parameter_node, {
            'data_type': tf_dtype_extractor(pb_node.type),
            'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        })
        body_parameters.append(parameter_node)
        body_parameter_names.append(param_id)
    return body_parameters, body_parameter_names
    def find_and_replace_pattern(self, graph: Graph):

        # we need to import these functions here to avoid circular dependent imports
        from openvino.tools.mo.front.tf.graph_utils import create_op_node_with_second_input

        if graph.graph['layout'] != 'NHWC':
            # we check it here because this transformation is called explicitly from the pipeline
            return

        # reshape from 4D-5D -> ND. Insert Transpose(NC(D)HW->N(D)HWC) before Reshape
        for reinterp_shape_node_id in graph.get_nodes_with_attributes(reinterp_shape=True):
            reinterp_shape_node = Node(graph, reinterp_shape_node_id)
            assert 0 in reinterp_shape_node.in_nodes(), \
                'Node {} does not have 0 input. \n{}'.format(reinterp_shape_node_id, graph.dump_graph_for_graphviz())
            input_shape = reinterp_shape_node.in_node(0).shape
            if self.is_nchw_to_nhwc_transpose_needed(reinterp_shape_node):
                permute_node = create_op_node_with_second_input(
                    graph, Transpose,
                    PermuteAttrs().get_nchw_to_nhwc_permutation(len(input_shape)).perm,
                    {'name': reinterp_shape_node.in_port(0).get_source().node.name + '/Transpose'})
                reinterp_shape_node.in_port(0).get_connection().insert_node(permute_node)

                order_const = permute_node.in_port(1).get_source().node
                order_const.infer(order_const)
                # do not infer the Transpose node because it should have its input data node in NCHW layout (but
                # currently it is NHWC because the data node attributes have not been permuted yet) and produce
                # output in NHWC layout (which is true at this moment)
                permute_node['need_shape_inference'] = False
                # mark the Transpose output data node as having the correct layout so its shape will not be permuted
                mark_output_as_in_correct_layout(permute_node, 0)

                # keep the reinterp_shape_node in NHWC layout
                for in_port_id, _ in reinterp_shape_node.in_ports().items():
                    mark_input_as_in_correct_layout(reinterp_shape_node, in_port_id)

        # reshape from ND -> 4D-5D. Insert Transpose(N(D)HWC->NC(D)HW) after Reshape
        for reinterp_shape_node_id in graph.get_nodes_with_attributes(reinterp_shape=True):
            reinterp_shape_node = Node(graph, reinterp_shape_node_id)
            assert 0 in reinterp_shape_node.out_nodes(), \
                'Node {} does not have 0 output. \n{}'.format(reinterp_shape_node_id, graph.dump_graph_for_graphviz())
            output_shape = reinterp_shape_node.out_node(0).shape
            if self.is_nhwc_to_nchw_transpose_needed(reinterp_shape_node):
                permute_node = create_op_node_with_second_input(
                    graph, Transpose,
                    PermuteAttrs().get_nhwc_to_nchw_permutation(len(output_shape)).perm,
                    {'name': reinterp_shape_node.id + '/Transpose'})
                reinterp_shape_node.out_port(0).get_connection().insert_node(permute_node)

                # the Reshape and Transpose operations should work in the original (NHWC) layout, so the Transpose
                # will convert it to NCHW
                mark_input_as_in_correct_layout(permute_node, 0)
                mark_input_as_in_correct_layout(permute_node, 1)
                # do not set Transpose output data node 'correct_data_layout' attribute so the data node shape will be
                # permuted

                # keep the reinterp_shape_node in NHWC layout
                mark_output_as_in_correct_layout(reinterp_shape_node, 0)
                for in_port_id in reinterp_shape_node.in_ports().keys():
                    if in_port_id:
                        mark_input_as_in_correct_layout(reinterp_shape_node, in_port_id)

                # do not re-infer the Transpose node because its output data node should be in NHWC layout to make the
                # rest of the graph consistent
                permute_node['need_shape_inference'] = False
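
# Illustrative sketch (not part of the original snippet): the reason for wrapping a Reshape with
# Transpose(NC(D)HW -> N(D)HWC) on the input and Transpose(N(D)HWC -> NC(D)HW) on the output is that
# Reshape must see the data in the original NHWC memory order. A hypothetical numpy demonstration
# for rank 4, assuming a reshape that flattens H*W*C:
import numpy as np

nhwc = np.arange(2 * 4 * 4 * 3).reshape(2, 4, 4, 3)   # original framework (NHWC) layout
expected = nhwc.reshape(2, -1)                        # what the framework's Reshape produces

nchw = nhwc.transpose(0, 3, 1, 2)                     # layout after the global NHWC -> NCHW change
restored = nchw.transpose(0, 2, 3, 1).reshape(2, -1)  # Transpose back to NHWC, then Reshape
assert np.array_equal(restored, expected)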