Example no. 1
0
    def ti_type_infer(node):
        """Run type inference for a TensorIterator node.

        Propagates data types from the TI external input ports onto the body
        Parameter nodes, runs the generic type inference on the body graph,
        then copies the inferred types from the body Result nodes back to the
        TI external output ports.

        :param node: the TensorIterator node whose ``body`` graph is inferred
        """
        from openvino.tools.mo.middle.passes.infer import type_infer
        ti_graph = node.body

        # Push each external input's data type onto the matching body Parameter.
        for record in node.input_port_map:
            internal_node = get_internal_node_by_layer_id(node, record['internal_layer_id'])
            assert internal_node.soft_get('type') == 'Parameter', internal_node.soft_get('type')

            real_external_port_idx = TensorIterator.special_port_to_real_port(node, record['external_port_id'])
            external_data_type = node.in_port(real_external_port_idx).get_connection().get_source().get_data_type()
            internal_node.data_type = external_data_type

        fake_input_const_nodes = []
        # create fake const node to make type inference work correctly for all TI input nodes
        for data_node in ti_graph.get_data_nodes(has_value=True):
            if len(data_node.in_nodes()) == 0:
                const_node = Const(ti_graph, {'name': 'const_', 'value': data_node.value}).create_node()
                fake_input_const_nodes.append(const_node)
                ti_graph.create_edge(const_node, data_node)

        type_infer(ti_graph)

        # propagate data types to the TI output ports
        for record in node.output_port_map:
            internal_node = get_internal_node_by_layer_id(node, record['internal_layer_id'])
            assert internal_node.soft_get('type') == 'Result', internal_node.soft_get('type')

            internal_data_type = internal_node.in_port(0).get_data_type()
            real_external_port_idx = TensorIterator.special_port_to_real_port(node, record['external_port_id'], 'out')
            node.out_port(real_external_port_idx).set_data_type(internal_data_type)

        # Remove the temporary Const nodes added above. The comprehension
        # variable is named `fake_node` so it does not shadow the `node`
        # parameter of this function.
        ti_graph.remove_nodes_from([fake_node.id for fake_node in fake_input_const_nodes])
Example no. 2
0
    def _insert_fake_quantize(self, graph):
        """Insert FakeQuantize operations into *graph* and return it.

        The whole model can be excluded from quantization via the
        'skip_model' ignored parameter, in which case the graph is returned
        untouched.
        """
        # Honor the model-wide skip switch before doing any work.
        if self.fq_insertion.ignored_params['skip_model']:
            return graph

        self.nodes_marker.mark_ignored_blocks(graph, self.target_device)
        graph.clean_up()

        type_infer(graph)

        # Every transformation stage is followed by a graph clean-up, in this
        # exact order: insert FQs, propagate them, optimize them, drop FQs on
        # non-quantizable precisions, rename FQs, then HW-specific removal.
        stages = (
            self.fq_insertion.find_and_replace_pattern,
            self.fq_propagation.find_and_replace_pattern,
            self.fq_optimization.find_and_replace_pattern,
            self.fq_propagation.delete_fq_non_quantizable_node_precision,
            self.fq_name_swapper.rename_fqs_in_the_end,
            lambda g: self.fq_removal.optimize_for_gp_hw(g, self.target_device),
        )
        for run_stage in stages:
            run_stage(graph)
            graph.clean_up()

        return graph
Example no. 3
0
 def type_infer(if_node: Node):
     """Infer data types for an If node: type both body sub-graphs and
     propagate the results to the If output ports."""
     from openvino.tools.mo.middle.passes.infer import type_infer

     # Pull input types onto the Parameters of the then- and else-branches.
     for is_then_branch in (True, False):
         If.update_body_parameters_type(if_node, is_then_branch)

     # Run the generic type inference on each body sub-graph.
     for body_graph in (if_node.then_graph, if_node.else_graph):
         type_infer(body_graph)

     If.update_if_output_ports_type(if_node)
Example no. 4
0
def prepare_emit_ir(graph: Graph, data_type: str, output_dir: str, output_model_name: str,
                    mean_data: [list, None] = None, input_names: list = None, meta_info: dict = None,
                    use_temporary_path=False, used_by_ir_reader=False):
    """Finalize *graph* (data-type conversion, type inference, deterministic
    node renumbering) and serialize it to IR: a ``.bin`` weights file, an
    ``.xml`` topology file and a ``.mapping`` tensor-names file in *output_dir*.

    :param graph: the model graph to serialize
    :param data_type: target data type for Parameters and blobs (e.g. an FP16/FP32
        selector — exact accepted values are defined by ``convert_data_type``)
    :param output_dir: directory the .bin/.xml/.mapping files are written to
    :param output_model_name: base name of the emitted files
    :param mean_data: optional mean-image data appended to the .bin file
    :param input_names: optional list of input names passed to the IR generator
    :param meta_info: optional metadata embedded into the IR
    :param use_temporary_path: when True, a '_tmp' suffix is added to the file names
    :param used_by_ir_reader: currently unused — see the disabled branch below
    """
    if input_names is None:
        input_names = []
    if meta_info is None:
        meta_info = {}
    # Relax graph consistency checks for the serialization phase.
    graph.strict_mode = False

    # temporary disable new FP16 generation
    # if not used_by_ir_reader:
    if True:
        # convert Parameter data types
        convert_data_type.convert_parameters_data_type(graph, data_type)
        # convert blobs (usually weights and biases)
        for sub_graph in [graph] + collect_sub_graphs(graph):
            convert_data_type.convert_blobs(sub_graph, data_type)

    # restore data type for specific inputs/outputs of specific ops to the data types required by nGraph
    for_graph_and_each_sub_graph_recursively(graph, convert_inputs_of_specific_ops)

    for_graph_and_each_sub_graph_recursively(graph, OpVersioning().find_and_replace_pattern)

    # do not run the type inference in sub-graphs. It will be called automatically as part of the type inference of
    # the TensorIterator nodes
    type_infer(graph)

    for_graph_and_each_sub_graph_recursively(graph, RemoveUselessConvert().find_and_replace_pattern)

    ResultRename().find_and_replace_pattern(graph)

    # Relabel op and data nodes of every (sub-)graph into a deterministic
    # topological order and renumber ports before serialization.
    for sub_graph in [graph] + collect_sub_graphs(graph):
        op_order, data_order = determined_sort(get_sorted_outputs(sub_graph))
        mapping = {v: u for u, v in enumerate(op_order)}
        # Data nodes are numbered after all op nodes (offset by graph size).
        mapping.update({v: u for u, v in enumerate(data_order, start=len(sub_graph))})
        relabel_nodes_inplace_safe(sub_graph, mapping)
        port_renumber(sub_graph)

    tensor_names.propagate_op_name_to_tensor(graph)

    ir_path_suffix = "_tmp" if use_temporary_path else ""

    bin_file = os.path.join(output_dir, '{}{}.bin'.format(output_model_name, ir_path_suffix))
    serialize_constants(graph, bin_file)

    # The mean image, if given, is serialized into the .bin file and later
    # referenced by offset/size from the generated IR.
    mean_offset = None
    mean_size = None
    if mean_data:
        mean_offset, mean_size = serialize_mean_image(bin_file, mean_data=mean_data)

    generate_ie_ir(graph=graph,
                   file_name=os.path.join(output_dir, '{}{}.xml'.format(output_model_name, ir_path_suffix)),
                   input_names=input_names,
                   mean_offset=mean_offset,
                   mean_size=mean_size,
                   meta_info=meta_info)
    tensor_names.output_tensor_names_map(graph, os.path.join(output_dir, '{}{}.mapping'.format(output_model_name, ir_path_suffix)))
Example no. 5
0
    def test_not_raises(self):
        """Type alignment on an Add with two float32 inputs infers a float32
        output without raising."""
        edges = [
            *connect('input_1', '0:add'),
            *connect('input_2', '1:add'),
            *connect('add', 'result')
        ]
        graph = self.build_graph_to_test_type_alignment(edges, input_1_type=np.float32, input_2_type=np.float32)

        type_infer(graph)
        add_node = Node(graph, 'add')
        # assertEquals is a deprecated alias; use the canonical assertEqual.
        self.assertEqual(add_node.out_port(0).get_data_type(), np.float32)
Example no. 6
0
    def test_second_input_const(self):
        """A float16 Const feeding an Add with a float32 input is aligned to
        float32 by type inference."""
        edges = [
            *connect('input_1', '0:add'),
            *connect('const', '1:add'),
            *connect('add', 'result')
        ]
        graph = self.build_graph_to_test_type_alignment(edges, input_1_type=np.float32, const_type=np.float16)

        type_infer(graph)
        const_node = Node(graph, 'const')
        # assertEquals is a deprecated alias; use the canonical assertEqual.
        self.assertEqual(const_node.out_port(0).get_data_type(), np.float32)
Example no. 7
0
 def type_infer(loop_node: Node):
     """Infer data types for a Loop node: type its body sub-graph and
     propagate the resulting types to the Loop output ports."""
     # Alias the generic pass to avoid any confusion with this method's name.
     from openvino.tools.mo.middle.passes.infer import type_infer as infer_body_types

     # Pull input types onto the body Parameters, infer the body graph, then
     # push the body Result types out to the Loop output ports.
     Loop.update_body_parameters_type(loop_node)
     infer_body_types(loop_node.body)
     Loop.update_loop_output_ports_type(loop_node)
Example no. 8
0
def nx_type_infer(model):
    """Run type inference on every model held by an NXModel wrapper.

    Each entry of ``model.models`` is a dict whose 'model' key holds the
    actual graph passed to ``type_infer``.
    """
    for wrapped in model.models:
        type_infer(wrapped['model'])