Example 1
    def extend(op: Node):
        for attr in [
                'strides', 'dilations', 'pads_begin', 'pads_end',
                'output_padding'
        ]:
            Extender.attr_to_list(op, attr)

        op['stride'] = int64_array([1, 1] + op.strides)
        op['dilation'] = int64_array([1, 1] + op.dilations)

        op['batch_dims'] = int64_array([0])
        op['channel_dims'] = int64_array([1])

        if op.has_valid('output_padding'):
            op.output_padding = int64_array([0, 0] + op.output_padding)

        # Be VERY careful with these attributes!
        op['input_feature_channel'] = 1
        op['output_feature_channel'] = 0

        dim = len(op.pads_begin)

        assert dim in (1, 2, 3), '{}D Convolution not supported!'.format(dim)

        pad = [[0, 0], [0, 0]]
        pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])

        op['pad'] = int64_array(pad)

        op['spatial_dims'] = [i + 2 for i in range(dim)]
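
A minimal, self-contained sketch of how the padding and stride attributes above are assembled, with plain lists and NumPy standing in for the Node attributes (the values are hypothetical):

    import numpy as np

    strides = [2, 2]                            # hypothetical per-spatial-dimension strides
    pads_begin = [1, 1]
    pads_end = [2, 2]

    # the batch and channel dimensions always get stride 1 and zero padding
    stride = np.array([1, 1] + strides, dtype=np.int64)
    pad = [[0, 0], [0, 0]] + [[pads_begin[i], pads_end[i]] for i in range(len(pads_begin))]

    print(stride)                               # [1 1 2 2]
    print(np.array(pad, dtype=np.int64))        # [[0 0] [0 0] [1 2] [1 2]]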
Example 2
    def extend(op: Node):
        def normalize_port_map(port_map: dict):
            for port in port_map:
                for elem in ['axis', 'stride', 'part_size', 'start', 'end']:
                    # ensure the key exists: a missing attribute becomes an explicit None
                    if port.get(elem) is None:
                        port[elem] = None

        assert op.has('body'), \
            'Something is wrong with TensorIterator layer {}, please check!'.format(op.name)

        # Now op.body is an IREngine, we need to replace it with IREngine.graph
        op.body.graph.graph['cmd_params'] = op.graph.graph['cmd_params']
        op.body.graph.graph['ir_version'] = op.graph.graph['ir_version']
        op.body.graph.name = op.name + '/body'

        for node in op.body.graph.get_op_nodes():
            node['internal_layer_id'] = int(node.id)

        op.body = copy_graph_with_ops(op.body.graph)

        normalize_port_map(op.input_port_map)
        normalize_port_map(op.output_port_map)

        for edge in op.back_edges:
            edge['from_layer'] = edge['from-layer']
            edge['to_layer'] = edge['to-layer']

            del edge['from-layer']
            del edge['to-layer']

        op['infer'] = Extender.use_shapes_from_ir
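
The hyphen-to-underscore renaming of the back-edge keys at the end can be illustrated with a plain dict (a sketch; the values are hypothetical):

    edge = {'from-layer': 5, 'to-layer': 2}     # hypothetical record restored from the IR XML
    edge['from_layer'] = edge.pop('from-layer') # pop() combines the assignment and the del
    edge['to_layer'] = edge.pop('to-layer')
    print(edge)                                 # {'from_layer': 5, 'to_layer': 2}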
Example 3
    def use_shapes_from_ir(node: Node):
        # This function is used instead of the operation's shape inference function to set all output shapes
        # the same as restored from the IR. First, check that the old (restored from IR) and
        # new (calculated during shape inference) input shapes are equal.
        node['new_input_shapes'] = list()
        for n in node.in_ports():
            if not node.in_port(n).disconnected():  # this condition handles optional inputs
                node.new_input_shapes.append(node.in_port(n).data.get_shape())
        assert len(node.new_input_shapes) == len(node.old_input_shapes), \
            'Something went wrong during copy shape inference of node {} with type {}!'.format(node.name, node.type)
        for new_input_shape, old_input_shape in zip(node.new_input_shapes,
                                                    node.old_input_shapes):
            assert np.array_equal(new_input_shape, old_input_shape), \
                'Something went wrong during copy shape inference of node {} with type {}!'.format(node.name, node.type)

        # Use the number of connected input ports to avoid numbering errors with the node.ports
        # dictionary, which is numbered end-to-end: input ports first, then output ports.
        connected_input_ports = []
        for n in node.in_ports():
            if not node.in_port(n).disconnected():
                connected_input_ports.append(node.in_port(n))
        i = len(connected_input_ports)

        # Set all output shapes the same as restored from the IR
        for num in node.out_ports():
            if i in node.ports:
                node.out_port(num).data.set_shape(int64_array(node.ports[i][0]))
            else:
                assert node.out_port(num).data.get_shape() is not None, \
                    'Newly added port does not have a shape set'
            i += 1
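
A plain-dict sketch of the end-to-end port numbering the last loop relies on; the shapes and record layout are hypothetical stand-ins, as node.ports in MO stores richer records:

    ports = {0: ([1, 16], None), 1: ([16], None), 2: ([1, 8], None)}  # hypothetical: ports 0-1 inputs, 2 output
    i = 2                                       # len(connected_input_ports)
    print(ports[i][0])                          # [1, 8] -- the IR shape for output port 0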
Example 4
 def split_offset(offset_node: Node):
     paired_node = MemoryOffset(offset_node.graph, {'name': offset_node.pair_name, 'splitted': True,
                                                    'pair_name': offset_node.id,
                                                    'element_size': offset_node['element_size'],
                                                    't': offset_node.t,
                                                    'has_default': offset_node.has_default}).create_node()
     offset_node['splitted'] = True
     offset_node.out_port(0).get_connection().set_source(paired_node.out_port(0))
     res_node = Result(offset_node.graph, {'name': offset_node.id + '_output'}).create_node()
     offset_node.out_port(0).connect(res_node.in_port(0))
 def extend(op: Node):
     assert op.has_valid('element_type'), \
         'Parameter node {} is missing the element_type attribute!'.format(op.name)
     op['data_type'] = destination_type_to_np_data_type(op.element_type)
     if op.shape == '':
         op.shape = int64_array([])
     else:
         Extender.attr_to_list(op, 'shape')
         if -1 in op.shape:
             op.shape = shape_array([
                 d if d != -1 else dynamic_dimension_value for d in op.shape
             ])
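
A minimal sketch of the -1 to dynamic-dimension replacement, with an int64 sentinel standing in for MO's dynamic_dimension_value (an assumption made only to keep the snippet self-contained):

    import numpy as np

    DYNAMIC = np.iinfo(np.int64).max            # stand-in for dynamic_dimension_value
    shape = [1, -1, 224, 224]                   # hypothetical Parameter shape
    resolved = [d if d != -1 else DYNAMIC for d in shape]
    print(resolved)                             # [1, 9223372036854775807, 224, 224]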
Example 6
    def extend(op: Node):
        for attr in StridedSlice.get_mask_names():
            # We cannot use op.has_and_set(attr) as the condition here, because it returns False if begin/end is
            # a 1D tensor and begin_mask/end_mask is equal to 0
            if op.has(attr) and op[attr] != '':
                Extender.attr_to_list(op, attr)
            else:
                assert attr not in ['begin_mask', 'end_mask'],\
                    '{} is not defined for the node {}'.format(attr, op.soft_get('name', op.id))
                op[attr] = int64_array([0])

        op.begin_mask = int64_array([1 - i for i in op.begin_mask])
        op.end_mask = int64_array([1 - i for i in op.end_mask])
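
The last two lines flip every mask bit to convert between the IR mask convention and the internal one; with plain lists the transformation looks like this (the mask values are hypothetical):

    import numpy as np

    begin_mask = [1, 0, 1]                      # hypothetical mask restored from the IR
    inverted = np.array([1 - i for i in begin_mask], dtype=np.int64)
    print(inverted)                             # [0 1 0]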
Example 7
 def extend(op: Node):
     assert op.has_valid('element_type'), \
         'Parameter node {} is missing the element_type attribute!'.format(op.name)
     op['data_type'] = destination_type_to_np_data_type(op.element_type)
     if op.shape == '':
         op.shape = int64_array([])
     else:
         Extender.attr_to_list(op, 'shape')
         for i, dim in enumerate(op.shape):
             if dim == -1 or (isinstance(dim, str) and ".." in dim):
                 op.shape[i] = -1
         op.shape = shape_array(
             [d if d != -1 else dynamic_dimension_value for d in op.shape])
Example 8
def replace_with_hsigmoid(graph: Graph, first_node: Node, last_node: Node):
    # determine the input port of the first node that receives the output of the 'input' node:
    # if port 0 is fed by a Const, the non-constant input arrives on port 1
    add_input_port_idx = int(
        first_node.in_port(0).get_connection().get_source().node.soft_get('op') == 'Const')
    last_node_name = last_node.soft_get('name', last_node.id)

    hsigmoid = HSigmoid(graph, {}).create_node()
    hsigmoid.in_port(0).connect(
        first_node.in_port(add_input_port_idx).get_source())
    last_node.out_port(0).get_connection().set_source(hsigmoid.out_port(0))

    rename_nodes([(last_node, last_node_name + '/TBR'),
                  (hsigmoid, last_node_name)])
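
The int(...) expression is a compact boolean-to-index trick: when port 0 of the first node is fed by a Const, the non-constant 'input' tensor must arrive on port 1. A toy sketch with a hypothetical producer type:

    producer_op = 'Const'                       # hypothetical type of the port-0 producer
    add_input_port_idx = int(producer_op == 'Const')
    print(add_input_port_idx)                   # 1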
Example 9
 def extend(op: Node):
     einsum_name = op.soft_get('name', op.id)
     if isinstance(op['equation'], list):
         op['equation'] = ','.join(op['equation'])
     elif not isinstance(op['equation'], str):
         assert False, "Equation of Einsum node {} has incorrect format.".format(
             einsum_name)
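
A short sketch of the equation normalization with a plain value in place of the Node attribute (the equation is hypothetical):

    equation = ['ab', 'bc->ac']                 # hypothetical list form restored from the IR
    if isinstance(equation, list):
        equation = ','.join(equation)
    print(equation)                             # ab,bc->ac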
Example 10
    def extend(op: Node):
        if not op.has_valid('activations'):
            op['activations'] = None

        mark_input_bins(op, start_port=2)

        op['need_copy_input_blobs'] = True
Example 11
 def attr_restore(node: Node, attribute: str, value=None):
     # Function to restore some specific attr for PriorBox & PriorBoxClustered layers
     if not node.has_valid(attribute):
         node[attribute] = [] if value is None else [value]
     # a string value here (typically an empty string restored from the IR) is treated as an empty list
     if isinstance(node[attribute], str):
         node[attribute] = []
     else:
         Extender.attr_to_list(node, attribute)
Example 12
    def extend(op: Node):
        def normalize_port_map(port_map: dict):
            for port in port_map:
                for elem in [
                        'axis', 'stride', 'part_size', 'start', 'end',
                        'purpose'
                ]:
                    # ensure the key exists: a missing attribute becomes an explicit None
                    if port.get(elem) is None:
                        port[elem] = None

        assert op.has('body'), \
            'There is no "body" attribute in the Loop op {}.'.format(op.name)

        # Now op.body is an IREngine, we need to replace it with IREngine.graph
        op.body.graph.graph['cmd_params'] = op.graph.graph['cmd_params']
        op.body.graph.graph['ir_version'] = op.graph.graph['ir_version']
        op.body.graph.name = op.name + '/body'

        for node in op.body.graph.get_op_nodes():
            node['internal_layer_id'] = int(node.id)

        op.body = copy_graph_with_ops(op.body.graph)

        normalize_port_map(op.input_port_map)
        normalize_port_map(op.output_port_map)

        # 'external_port_id' uses end-to-end numbering of ports, but at this point it is separate for input and
        # output ports, so we need to decrease the output port_id by the number of input ports
        for record in op.output_port_map:
            if record['external_port_id'] != -1:
                record['external_port_id'] -= len(op.in_ports())

        for edge in op.back_edges:
            edge['from_layer'] = edge['from-layer']
            edge['to_layer'] = edge['to-layer']

            edge['to_port'] = 0
            edge['from_port'] = 0

            del edge['from-layer']
            del edge['to-layer']
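
The external_port_id re-basing above can be shown with plain values: output ports continue the input numbering in the IR, so subtracting the input-port count yields a zero-based output index (the numbers are hypothetical):

    num_input_ports = 3                         # hypothetical: len(op.in_ports())
    record = {'external_port_id': 4}
    if record['external_port_id'] != -1:
        record['external_port_id'] -= num_input_ports
    print(record)                               # {'external_port_id': 1}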
Example 13

def common_backpropdata_extender(op: Node):
    for attr in ['strides', 'output_padding', 'pads_begin', 'pads_end', 'dilations']:
        Extender.attr_to_list(op, attr)

    if op.has_valid('output_padding'):
        op.output_padding = int64_array([0, 0] + op.output_padding)

    dim = len(op.strides)

    if op.has_valid('pads_begin') and op.has_valid('pads_end'):
        pad = [[0, 0], [0, 0]]
        pad.extend([[op.pads_begin[i], op.pads_end[i]] for i in range(dim)])

        op['pad'] = int64_array(pad)

    op['spatial_dims'] = [i + 2 for i in range(dim)]

    if not op.has_valid('dilations'):
        op['dilations'] = [1 for _ in range(dim)]
    if not op.has_valid('strides'):
        op['strides'] = [1 for _ in range(dim)]

    op['dilation'] = int64_array([1, 1] + op.dilations)
    op['stride'] = int64_array([1, 1] + op.strides)

    op['infer'] = backpropdata_infer
Example 14
    def extend(op: Node):
        assert op.has('then_graph'), \
            'There is no "then_body" attribute in the If op {}.'.format(op.name)
        assert op.has('else_graph'), \
            'There is no "else_body" attribute in the If op {}.'.format(op.name)
        # Now op.then_graph and op.else_graph are IREngine instances; replace them with their graphs
        op.then_graph.graph.graph['cmd_params'] = op.graph.graph['cmd_params']
        op.then_graph.graph.graph['ir_version'] = op.graph.graph['ir_version']
        op.then_graph.graph.name = op.name + '/then_body'

        op.else_graph.graph.graph['cmd_params'] = op.graph.graph['cmd_params']
        op.else_graph.graph.graph['ir_version'] = op.graph.graph['ir_version']
        op.else_graph.graph.name = op.name + '/else_body'
        op.then_graph = copy_graph_with_ops(op.then_graph.graph)
        op.else_graph = copy_graph_with_ops(op.else_graph.graph)

        IfExtender.set_input_output_id(op.then_graph, op.then_input_port_map,
                                       op.then_output_port_map)
        IfExtender.set_input_output_id(op.else_graph, op.else_input_port_map,
                                       op.else_output_port_map)
Example 15
    def extend(op: Node):
        assert op.has_valid('element_type'), \
            'Parameter node {} is missing the element_type attribute!'.format(op.name)
        op['data_type'] = destination_type_to_np_data_type(op.element_type)
        if op.shape == '':
            op.shape = int64_array([])
        else:
            Extender.attr_to_list(op, 'shape')
            shape = op.shape.copy()
            has_shapes_with_boundaries = False
            for i, dim in enumerate(op.shape):
                if dim == -1 or (isinstance(dim, str) and ".." in dim):
                    shape[i] = -1
                    if ".." in dim:
                        has_shapes_with_boundaries = True
            shape = shape_array([
                d if d not in [-1, '?'] else dynamic_dimension_value
                for d in shape
            ])

            if has_shapes_with_boundaries:
                shape_list = []
                for i, dim in enumerate(op.shape):
                    if not isinstance(dim, str):
                        shape_list.append(dim)
                    else:
                        shape_list.append(parse_dimension(dim))

                # This value is used only for serialization of partial shapes with boundaries
                # for Parameter node.
                # 'user_shape' is not used in shape inference, as propagation of partial shapes with boundaries
                # is not implemented in MO.
                op['user_shape'] = tuple(shape_list)

            # If 'user_shape' is not set, 'shape' attribute is used for serialization.
            # 'shape' is also used for shape inference.
            op.shape = shape
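
Boundary strings such as '1..10' are handled by parse_dimension in MO; as a rough, hypothetical illustration of what parsing a 'min..max' string involves (parse_dim_bounds below is not the actual MO helper):

    def parse_dim_bounds(dim: str):
        low, _, high = dim.partition('..')
        return int(low) if low else 0, int(high) if high else -1

    print(parse_dim_bounds('1..10'))            # (1, 10)
    print(parse_dim_bounds('..224'))            # (0, 224)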
Example 16

 def extend(op: Node):
     if op.has_valid('classes_index_type'):
         op['classes_index_type'] = destination_type_to_np_data_type(op.classes_index_type)
     if op.has_valid('sequence_length_type'):
         op['sequence_length_type'] = destination_type_to_np_data_type(op.sequence_length_type)
Example 17
 def extend(op: Node):
     if op.get_opset() != "extension":
         op['output_type'] = destination_type_to_np_data_type(
             op.output_type)
Example 18
 def extend(op: Node):
     if op.out_port(0).disconnected():
         op['remove_values_output'] = True
     if op.has_valid('index_element_type'):
         op['index_element_type'] = destination_type_to_np_data_type(
             op.index_element_type)
Example 19
 def extend(op: Node):
     if not op.has_valid('activations'):
         op['activations'] = None
Example 20
 def extend(op: Node):
     op['dst_type'] = destination_type_to_np_data_type(op.destination_type)
     # CompressQuantizeWeights generates an IR with a constant sub-graph that should not be const-folded:
     #   Const(u8) -> Convert(to fp) -> (some eltwise operations) -> FakeQuantize
     if op.in_node().in_node().soft_get('type') == 'Const':
         op['stop_value_propagation'] = True
Example 21
 def attr_to_list(node: Node, attribute: str):
     if not node.has_valid(attribute):
         log.warning('Attribute {} is missing in node {} with type {}!'.format(
             attribute, node.soft_get('name'), node.soft_get('type')))
     elif not isinstance(node[attribute], list):
         node[attribute] = [node[attribute]]
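
Since attr_to_list is the workhorse used throughout these extenders, here is a self-contained sketch of its scalar-to-list normalization, with a plain dict standing in for the MO Node:

    def attr_to_list(node: dict, attribute: str):
        if attribute not in node:
            print('Attribute {} is missing!'.format(attribute))
        elif not isinstance(node[attribute], list):
            node[attribute] = [node[attribute]]

    node = {'strides': 2}                       # scalar restored from a single-value IR attribute
    attr_to_list(node, 'strides')
    print(node['strides'])                      # [2]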
Example 22
 def extend(op: Node):
     if op.has_valid('output_type'):
         op['output_type'] = destination_type_to_np_data_type(
             op.output_type)
Example 23
 def extend(op: Node):
     if not op.has_valid('activations'):
         op['activations'] = None
     op['infer'] = Extender.use_shapes_from_ir