Example #1
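    # If type inference: push the If node's input types into the then/else body graphs,
    # run type_infer on each body, then update the If output ports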
    def type_infer(if_node: Node):
        from mo.middle.passes.infer import type_infer
        If.update_body_parameters_type(if_node, True)
        If.update_body_parameters_type(if_node, False)
        type_infer(if_node.then_graph)
        type_infer(if_node.else_graph)
        If.update_if_output_ports_type(if_node)
Example #2
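    # TensorIterator type inference: copy external input types onto the body Parameter nodes,
    # run type_infer on the body graph, then propagate the Result input types to the TI output ports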
    def ti_type_infer(node):
        from mo.middle.passes.infer import type_infer
        ti_graph = node.body

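        # propagate external input data types to the corresponding body Parameter nodes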
        for record in node.input_port_map:
            internal_node = get_internal_node_by_layer_id(node, record['internal_layer_id'])
            assert internal_node.soft_get('type') == 'Parameter', internal_node.soft_get('type')

            real_external_port_idx = TensorIterator.special_port_to_real_port(node, record['external_port_id'])
            external_data_type = node.in_port(real_external_port_idx).get_connection().get_source().get_data_type()
            internal_node.data_type = external_data_type

        fake_input_const_nodes = []
        # create fake const node to make type inference work correctly for all TI input nodes
        for data_node in ti_graph.get_data_nodes(has_value=True):
            if len(data_node.in_nodes()) == 0:
                const_node = Const(ti_graph, {'name': 'const_', 'value': data_node.value}).create_node()
                fake_input_const_nodes.append(const_node)
                ti_graph.create_edge(const_node, data_node)

        type_infer(ti_graph)

        # propagate data types to the TI output ports
        for record in node.output_port_map:
            internal_node = get_internal_node_by_layer_id(node, record['internal_layer_id'])
            assert internal_node.soft_get('type') == 'Result', internal_node.soft_get('type')

            internal_data_type = internal_node.in_port(0).get_data_type()
            real_external_port_idx = TensorIterator.special_port_to_real_port(node, record['external_port_id'], 'out')
            node.out_port(real_external_port_idx).set_data_type(internal_data_type)

        ti_graph.remove_nodes_from([node.id for node in fake_input_const_nodes])
Example #3
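    # type alignment test: Add with two float32 inputs must not raise and must produce a float32 output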
    def test_not_raises(self):
        edges = [
            *connect('input_1', '0:add'),
            *connect('input_2', '1:add'),
            *connect('add', 'result')
        ]
        graph = self.build_graph_to_test_type_alignment(edges, input_1_type=np.float32, input_2_type=np.float32)

        type_infer(graph)
        add_node = Node(graph, 'add')
        self.assertEqual(add_node.out_port(0).get_data_type(), np.float32)
Example #4
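    # type alignment test: a float16 Const feeding a float32 Add is expected to be aligned to float32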
    def test_second_input_const(self):
        edges = [
            *connect('input_1', '0:add'),
            *connect('const', '1:add'),
            *connect('add', 'result')
        ]
        graph = self.build_graph_to_test_type_alignment(edges, input_1_type=np.float32, const_type=np.float16)

        type_infer(graph)
        const_node = Node(graph, 'const')
        self.assertEqual(const_node.out_port(0).get_data_type(), np.float32)
Example #5
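    # run type inference, then remove FakeQuantize nodes whose input tensor has an integer or boolean data type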
    def delete_fq_non_quantizable_node_precision(self, graph):
        type_infer(graph)
        fq_removal = RemoveFakeQuantize()
        fq_removal.quantize_agnostic_operations = self.quantize_agnostic_operations
        fq_removal.quantize_operations = self.quantize_operations
        node_int_fq = []
        fq_queue = deque(sorted(graph.get_op_nodes(type='FakeQuantize'), key=lambda x: x.name))
        while fq_queue:
            fq = fq_queue.popleft()
            if fq.in_port(0).get_source() is not None and fq.in_port(0).get_source().is_data_type_defined():
                type_node = fq.in_port(0).get_source().get_data_type()
                if type_node in (np.int32, np.int64, bool):
                    node_int_fq.append(fq.name)
                    fq_removal.find_and_remove_node(graph, fq.name)
Example #6
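# prepare_emit_ir: convert parameter and blob data types, run type inference on the main graph,
# then serialize the IR (.xml/.bin files) and the tensor name mapping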
def prepare_emit_ir(graph: Graph, data_type: str, output_dir: str, output_model_name: str,
                    mean_data: [list, None] = None, input_names: list = None, meta_info: dict = None):
    if input_names is None:
        input_names = []
    if meta_info is None:
        meta_info = {}
    graph.strict_mode = False

    # convert Parameter data types
    convert_data_type.convert_parameters_data_type(graph, data_type)
    # convert blobs (usually weights and biases)
    for sub_graph in [graph] + collect_sub_graphs(graph):
        convert_data_type.convert_blobs(sub_graph, data_type)

    # restore data type for specific inputs/outputs of specific ops to the data types required by nGraph
    if not graph.graph['cmd_params'].generate_deprecated_IR_V7:
        for_graph_and_each_sub_graph_recursively(graph, convert_inputs_of_specific_ops)

    if graph.graph['cmd_params'].generate_experimental_IR_V10:
        for_graph_and_each_sub_graph_recursively(graph, OpVersioning().find_and_replace_pattern)

    # do not run the type inference in sub-graphs. It will be called automatically as part of the type inference of
    # the TensorIterator nodes
    type_infer(graph)
    RemoveUselessConvert().find_and_replace_pattern(graph)

    for sub_graph in [graph] + collect_sub_graphs(graph):
        op_order, data_order = determined_sort(get_sorted_outputs(sub_graph))
        mapping = {v: u for u, v in enumerate(op_order)}
        mapping.update({v: u for u, v in enumerate(data_order, start=len(sub_graph))})
        relabel_nodes_inplace_safe(sub_graph, mapping)
        port_renumber(sub_graph)

    tensor_names.propagate_op_name_to_tensor(graph)

    bin_file = os.path.join(output_dir, '{}.bin'.format(output_model_name))
    serialize_constants(graph, bin_file)

    mean_offset = None
    mean_size = None
    if mean_data:
        mean_offset, mean_size = serialize_mean_image(bin_file, mean_data=mean_data)

    generate_ie_ir(graph=graph,
                   file_name=os.path.join(output_dir, '{}.xml'.format(output_model_name)),
                   input_names=input_names,
                   mean_offset=mean_offset,
                   mean_size=mean_size,
                   meta_info=meta_info)
    tensor_names.output_tensor_names_map(graph, os.path.join(output_dir, '{}.mapping'.format(output_model_name)))
Example #7
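    # older TensorIterator type inference: add fake Const producers for dangling body inputs,
    # infer types inside the body, propagate them to the TI output ports, then drop the fake nodes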
    def _ti_type_infer(node):
        from mo.middle.passes.infer import type_infer
        ti_graph = node.body

        # create fake const node to make type inference work correctly for all TI input nodes
        fake_input_const_nodes = []
        for port_map in __class__.generate_port_map(node, node.input_port_map):
            internal_input_data = Node(ti_graph, port_map['internal_layer_id']).in_node(port_map['internal_port_id'])
            if len(internal_input_data.in_nodes()) == 0:
                input_producer_port = node.in_port(port_map['external_port_id']).get_connection().get_source()
                input_type = input_producer_port.get_data_type()
                const_node = Const(ti_graph, {'name': 'fake_const_', 'value': np.ones([1], dtype=input_type)}).create_node()
                fake_input_const_nodes.append(const_node)
                ti_graph.create_edge(const_node, internal_input_data)

        # create const Op node for constant data nodes inside the TI
        for data_node in ti_graph.get_data_nodes(has_value=True):
            if len(data_node.in_nodes()) == 0:
                const_node = Const(ti_graph, {'name': 'const_', 'value': data_node.value}).create_node()
                fake_input_const_nodes.append(const_node)
                ti_graph.create_edge(const_node, data_node)

        type_infer(ti_graph)

        # propagate data types to the TI output ports
        output_port_map = __class__.generate_port_map(node, node.output_port_map)
        for port_map in output_port_map:
            internal_output_port = Node(ti_graph, port_map['internal_layer_id']).out_port(port_map['internal_port_id'])
            ti_output_port = node.out_port(port_map['external_port_id'])
            ti_output_port.set_data_type(internal_output_port.get_data_type())

        ti_graph.remove_nodes_from([node.id for node in fake_input_const_nodes])
Example #8
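    # Loop type inference: push input types into the Loop body, infer types there, then update the Loop output ports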
    def type_infer(loop_node: Node):
        from mo.middle.passes.infer import type_infer
        Loop.update_body_parameters_type(loop_node)
        type_infer(loop_node.body)
        Loop.update_loop_output_ports_type(loop_node)