def resolve_minus3(self, shape_node, input_index, reshape_index, dims):
        # MXNet Reshape '-3' merges two consecutive input dims: gather dims[input_index] and
        # dims[input_index + 1] from the Shape node and multiply them into one output dim
        shape_indexes_node1 = Const(
            shape_node.graph,
            dict(name=shape_node.id + '/ShapeMinus3_index_const1_' +
                 str(input_index),
                 value=int64_array([input_index]))).create_node()
        dims_node1 = get_shape_values_by_indices_node(shape_node,
                                                      shape_indexes_node1)

        shape_indexes_node2 = Const(
            shape_node.graph,
            dict(name=shape_node.id + '/ShapeMinus3_index_const2_' +
                 str(input_index),
                 value=int64_array([input_index + 1]))).create_node()
        dims_node2 = get_shape_values_by_indices_node(shape_node,
                                                      shape_indexes_node2)

        mul_node = Mul(
            shape_node.graph,
            dict(name=shape_node.id + '/MulMinus3_' +
                 str(input_index))).create_node()

        mul_node.in_port(0).connect(dims_node1.out_port(0))
        mul_node.in_port(1).connect(dims_node2.out_port(0))

        input_index = input_index + 2
        reshape_index = reshape_index + 1
        return input_index, reshape_index, dims, mul_node
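A minimal NumPy sketch of the shape arithmetic this helper builds in graph form, assuming the MXNet Reshape convention where -3 merges two consecutive input dimensions (the shape and index below are hypothetical):

import numpy as np

input_shape = np.array([2, 3, 4, 5], dtype=np.int64)
input_index = 1
# the Mul node multiplies the two dims consumed by '-3' into a single output dim
merged_dim = input_shape[input_index] * input_shape[input_index + 1]
assert merged_dim == 12
# the caller then advances past both consumed input dims, as the returned indices show
input_index += 2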
Example 2
    @staticmethod
    def replace_sub_graph(graph: Graph, match: dict):
        strided_slice_node = match['strided_slice']
        const_node = match['const']
        reshape_node = match['reshape']
        pack_node = match['pack']

        if not const_node.has_valid('value') or not is_value_is_constant(const_node.value, -1):
            log.debug('The pattern does not correspond to flatten. The second reshape dimension is not -1. It is {}'.
                      format(const_node.soft_get('value')))
            return
        if len(pack_node.in_nodes()) != 2:
            log.debug('The pattern does not correspond to flatten. The "Pack" operation produces a tensor with {} '
                      'items but should produce just 2.'.format(len(pack_node.in_nodes())))
            return

        expected_values = [0, 1, 1]  # expected values to a StridedSlice to get the batch size
        for ind in range(3):
            if not strided_slice_node.in_node(ind + 1).has_valid('value') or \
                    not is_value_is_constant(strided_slice_node.in_node(ind + 1).value, expected_values[ind]):
                log.debug('The pattern does not correspond to flatten because of the input with index {}. The value is '
                          '"{}".'.format(ind, strided_slice_node.in_node(ind + 1).soft_get('value')))
                return

        reshape_node.in_port(1).disconnect()
        reshape_const_node = Const(graph, {'value': int64_array([0, -1]),
                                           'name': reshape_node.soft_get('name', reshape_node.id) + '/shape'}).create_node()
        reshape_node.in_port(1).connect(reshape_const_node.out_port(0))
        reshape_node['special_zero'] = True
        log.debug('The node "{}" is actually a Flatten node'.format(reshape_node.soft_get('name')))
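For reference, a NumPy sketch of what the resulting Reshape computes with special_zero=True and target shape [0, -1]: the 0 copies the batch dimension from the input and -1 absorbs everything else (shapes below are hypothetical):

import numpy as np

x = np.zeros((8, 3, 224, 224))
flattened = x.reshape(x.shape[0], -1)  # '0' keeps dim 0, '-1' collapses the rest
assert flattened.shape == (8, 3 * 224 * 224)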
Example 3
    def find_and_replace_pattern(self, graph: Graph):
        for roll_node in graph.get_op_nodes(op='Roll'):
            if not roll_node.in_port(2).disconnected():
                continue
            node_name = roll_node.soft_get('name', roll_node.id)

            # reshape to 1d tensor
            reshape_to_1d = create_op_node_with_second_input(
                graph, Reshape, int64_array([-1]),
                {'name': node_name + '/reshape'})
            roll_node.in_port(0).get_connection().insert_node(reshape_to_1d)

            # add zero const as axes input to roll
            const_zero = Const(graph, {
                'value': int64_array([0]),
                'name': node_name + '/axes'
            }).create_node()
            const_zero.out_port(0).connect(roll_node.in_port(2))

            # reshape to original shape
            shape_of = Shape(graph, {
                'name': node_name + '/shape_of'
            }).create_node()
            reshape_to_1d.in_port(0).get_connection().add_destination(
                shape_of.in_port(0))
            reshape_to_orig_shape = Reshape(graph, {}).create_node()
            rename_nodes([(roll_node, node_name + '/roll'),
                          (reshape_to_orig_shape, node_name)])
            shape_of.out_port(0).connect(reshape_to_orig_shape.in_port(1))
            roll_node.out_port(0).get_connection().insert_node(
                reshape_to_orig_shape)
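The inserted subgraph relies on the same identity that np.roll uses when no axis is given: roll the flattened tensor, then restore the original shape. A quick sketch of that equivalence:

import numpy as np

x = np.arange(12).reshape(3, 4)
shift = 5
rolled = np.roll(x.reshape(-1), shift).reshape(x.shape)  # what the Reshape-Roll-Reshape chain computes
assert np.array_equal(rolled, np.roll(x, shift))         # np.roll with axis=None behaves the same way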
Example 4
    def swap_pad_and_unsqueeze(self, pad: Node, unsqueeze: Node):
        # insert additional items to the pads in the position specified by the Unsqueeze axis
        unsqueeze_axis = unsqueeze.in_port(1).data.get_value()
        for port_id in [1, 2]:
            current_value = pad.in_port(
                port_id).get_connection().data.get_value()
            new_value_node = Const(
                pad.graph, {
                    'name':
                    pad.soft_get('name', pad.id) + '/value_{}'.format(port_id),
                    'value':
                    shape_insert(current_value, unsqueeze_axis.item(), 0),
                    'override_output_shape':
                    True
                }).create_node()
            pad.in_port(port_id).disconnect()
            pad.in_port(port_id).connect(new_value_node.out_port(0))

        # swap Pad and Unsqueeze layers
        unsqueeze.in_port(0).disconnect()
        pad.in_port(0).get_connection().set_destination(unsqueeze.in_port(0))
        unsqueeze.out_port(0).get_connection().set_source(pad.out_port(0))
        unsqueeze.out_port(0).connect(pad.in_port(0))

        # output shapes of Pad and Unsqueeze changed so need to recalculate them
        pad['override_output_shape'] = True
        unsqueeze['override_output_shape'] = True
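A sketch of the pads bookkeeping, assuming shape_insert behaves like np.insert: once Pad runs after Unsqueeze, its input has one extra dimension, so a zero pad entry is inserted at the Unsqueeze axis (values are hypothetical):

import numpy as np

pads_begin = np.array([1, 2])  # pads for the original 2D input
unsqueeze_axis = 0
new_pads_begin = np.insert(pads_begin, unsqueeze_axis, 0)  # the new axis is never padded
assert np.array_equal(new_pads_begin, np.array([0, 1, 2]))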
Example 5
    def replace_op(self, graph: Graph, node: Node):
        inp0 = node.in_port(0).get_source().node
        inp1 = node.in_port(1).get_source().node

        begin_id = Const(graph, {"value": int64_array([1])}).create_node()
        end_id = Const(graph, {"value": int64_array([2])}).create_node()
        dim1 = StridedSlice(
            graph,
            dict(
                name=inp0.name + "/dim1",
                begin_mask=[1],
                end_mask=[1],
                shrink_axis_mask=[0],
                new_axis_mask=[0],
                ellipsis_mask=[0],
            ),
        ).create_node([inp1, begin_id, end_id])

        rows = Div(graph, dict(name=node.name + "/rows")).create_node([inp0, dim1])

        inp0 = Cast(
            graph, dict(name=inp0.name + "/fp32", dst_type=np.float32)
        ).create_node([inp0])
        dim1 = Cast(
            graph, dict(name=dim1.name + "/fp32", dst_type=np.float32)
        ).create_node([dim1])
        cols = FloorMod(graph, dict(name=node.name + "/cols")).create_node([inp0, dim1])
        cols = Cast(
            graph, dict(name=cols.name + "/i64", dst_type=np.int64)
        ).create_node([cols])

        concat = PackOp(graph, dict(name=node.name + "/merged", axis=0)).create_node(
            [rows, cols]
        )
        return [concat.id]
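Numerically the built subgraph performs a divmod, assuming the first input carries a flat count and dim1 is sliced out of the second input's shape; the fp32 round trip around FloorMod only changes the compute type. A sketch with hypothetical values:

import numpy as np

flat = np.int64(32)
dim1 = np.int64(5)      # taken from the second input's shape
rows = flat // dim1     # the Div node -> 6
cols = flat % dim1      # the FloorMod node (done in fp32, cast back to i64) -> 2
assert (rows, cols) == (6, 2)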
Example 6
    @staticmethod
    def normalize_body_graph(loop_node: Node):
        loop_name = loop_node.soft_get('name', loop_node.id)
        # connect "trip count" input if it is not connected with default value "Infinity" (-1)
        if not loop_node.is_in_port_connected(0):
            loop_node.add_input_port(0, skip_if_exist=True)
            Const(loop_node.graph, {'name': loop_name + '/trip_count', 'value': int64_array(-1)}).\
                create_node().out_port(0).connect(loop_node.in_port(0))

        # connect "execution condition" input if it is not connected with default value True
        if not loop_node.is_in_port_connected(1):
            loop_node.add_input_port(1, skip_if_exist=True)
            Const(loop_node.graph, {'name': loop_name + '/execution_cond', 'value': np.array(True, dtype=bool)}).\
                create_node().out_port(0).connect(loop_node.in_port(1))

        # scan output need Unsqueeze over axis 0
        for record in loop_node.output_port_map:
            body_node = Loop.get_body_node_by_internal_id(loop_node, record['internal_layer_id'])
            assert body_node is not None
            assert body_node.soft_get('type') == 'Result'

            if record['axis'] is not None:
                unsqueeze = create_op_with_const_inputs(loop_node.body, Unsqueeze, {1: int64_array([0])})
                body_node.in_port(0).get_connection().insert_node(unsqueeze)

        Loop.normalize_input_output_ports(loop_node)
Example 7
def create_bias_node(graph: Graph, src_node):
    logger.debug('Creating new bias for {}'.format(src_node.name))
    destination_ports = []
    for dest_port in src_node.out_port(0).get_destinations():
        destination_ports.append(dest_port)

    # Create Add and constant with zero bias
    bias_shape = src_node.out_port(0).data.get_shape()
    add_bias_shape = [1] * len(bias_shape)
    add_bias_shape[1] = bias_shape[1]
    weights = get_weights_for_node(src_node)
    bias_dtype = np.float32
    if weights and weights.out_port(0).is_data_type_defined():
        bias_dtype = weights.out_port(0).get_data_type()
    add_bias = Const(
        graph, {
            'value': np.zeros(add_bias_shape, dtype=bias_dtype),
            'shape': add_bias_shape,
            'need_shape_inference': True
        }).create_node()
    add_op = Add(graph, {
        'name': src_node.name + '/add_',
        'need_shape_inference': True
    }).create_node()

    # Connect Const to Add node
    add_op.in_port(1).connect(add_bias.out_port(0))

    # Reconnect src_node -> output to src_node -> Add -> output
    src_node.out_port(0).disconnect()
    src_node.out_port(0).get_connection().set_destination(add_op.in_port(0))

    for destination_port in destination_ports:
        add_op.out_port(0).connect(destination_port)
    add_bias.out_node(0)['Insert_Convert_operation_after'] = True
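The zero bias is shaped to broadcast along the channel dimension only. A sketch of that shape logic, assuming an NCHW activation with 16 channels:

import numpy as np

out_shape = (8, 16, 32, 32)              # src_node output shape, NCHW
add_bias_shape = [1] * len(out_shape)
add_bias_shape[1] = out_shape[1]         # keep only the channel dim
bias = np.zeros(add_bias_shape, dtype=np.float32)
assert bias.shape == (1, 16, 1, 1)       # broadcasts cleanly against the activation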
Example 8
 @classmethod
 def extract(cls, node):
     attrs = {
         'data_type': node.value.dtype,
         'value': node.value,
     }
     Const.update_node_stat(node, attrs)
     return cls.enabled
Example 9
 def replace_pattern(self, graph: Graph, match: dict):
     """
     Adds Normalize layer weights, which are required by Inference Engine, 
     but do not always exist in MXNet model. 
     
     L2Normalization is mapped to Normalize layer
     so we need to generate Normalize weights filled with ones.
     
     Parameters
     ----------
     graph : Graph
        Graph with loaded model.
      match : dict
        Patterns which were found in graph structure.
     """
     l2_normalization_node = match['l2_normalization']
     if len(l2_normalization_node.in_nodes()) < 2:
         value = np.full([l2_normalization_node.in_node(0).shape[1]],
                         1.0,
                         dtype=np.float32)
         weights_node = Const(
             graph,
             dict(name=l2_normalization_node['name'] + '_weights',
                  value=value)).create_node()
         l2_normalization_node.add_input_port(1)
         l2_normalization_node.in_port(1).connect(weights_node.out_port(0))
         l2_normalization_node.in_port(1).bin = 'weights'
Example 10
    def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):
        node = match['reduce']
        connected_in_ports = [
            port for port in node.in_ports().values()
            if not port.disconnected()
        ]
        if len(connected_in_ports) == 1:
            node_name = node.soft_get('name', node.id)

            # if 'axis' is set, move it to a new Const input; otherwise reduce over all dimensions of the
            # input tensor by building the axes at runtime as Range(0, Rank(input), 1) below
            if node.has_valid('axis'):
                const = Const(graph, {
                    'name': node_name + '/axis',
                    'value': node.axis
                }).create_node()
                node.add_input_port(1, skip_if_exist=True)
                const.out_port(0).connect(node.in_port(1))
                del graph.node[node.id]['axis']
            else:
                # The default (if there is no 'axis') is to reduce over all the dimensions of the input tensor.
                axes = create_op_with_const_inputs(
                    graph, Range, {
                        0: int64_array(0),
                        2: int64_array(1)
                    }, dict(name=node_name + '/axes'))
                end_of_range = Rank(graph, dict(name=node_name +
                                                '/range_end')).create_node()
                node.in_port(0).get_connection().get_source().connect(
                    end_of_range.in_port(0))
                end_of_range.out_port(0).connect(axes.in_port(1))

                node.add_input_port(1, skip_if_exist=True)
                axes.out_port(0).connect(node.in_port(1))
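The Range(0, Rank(input), 1) subgraph reproduces the usual default of reducing over every axis, as this NumPy sketch shows:

import numpy as np

x = np.random.rand(2, 3, 4)
axes = np.arange(0, x.ndim, 1)  # what Range(0, Rank, 1) evaluates to
assert np.isclose(np.sum(x, axis=tuple(axes)), np.sum(x))  # same as reducing with no axes at all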
Example 11
    @staticmethod
    def add_unsqueeze_for_new(graph: Graph, ss_node: Node):
        log.info(
            "StridedSlice op with new axis mask '{}' has been detected".format(
                ss_node.id))
        if len(ss_node.in_nodes()) != 4 or len(ss_node.out_nodes()) != 1:
            return

        shape_out = ss_node.out_node().shape
        dim = mo_array(range(len(ss_node['new_axis_mask'])))[mo_array(
            ss_node['new_axis_mask'], dtype=bool)]
        ss_shape = []
        for i in range(0, len(ss_node['new_axis_mask'])):
            if not ss_node['new_axis_mask'][i]:
                ss_shape.append(shape_out[i])
            else:
                ss_node['new_axis_mask'][i] = 0

        ss_node.out_port(0).data.set_shape(ss_shape)

        # insert Unsqueeze
        unsqueeze_node = Unsqueeze(graph,
                                   dict(name=ss_node.name +
                                        '/Unsqueeze_new')).create_node()
        ss_node.out_port(0).get_connection().insert_node(unsqueeze_node)
        unsqueeze_node.out_port(0).data.set_shape(shape_out)

        dims_node = Const(graph, {
            'name': unsqueeze_node.id + '/Indices',
            'value': int64_array(dim)
        }).create_node()
        dims_node.out_port(0).connect(unsqueeze_node.in_port(1))
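In effect the StridedSlice keeps producing the un-expanded shape and the inserted Unsqueeze re-adds the requested axes, which mirrors np.expand_dims (shapes below are hypothetical):

import numpy as np

ss_out = np.zeros((5, 7))              # StridedSlice output after new_axis_mask is cleared
new_axis_mask = [1, 0, 0]
dims = np.nonzero(new_axis_mask)[0]    # positions where new axes were requested
expanded = np.expand_dims(ss_out, tuple(dims))
assert expanded.shape == (1, 5, 7)     # the shape the original StridedSlice promised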
Example 12
    def replace_pattern(self, graph: Graph, match: dict):
        bias_add = match['BiasAdd']

        # Replace BiasAdd by Add operation
        new_add = Add(graph, {'name': bias_add.id + '/Add'}).create_node()

        bias_add.in_port(0).get_connection().set_destination(new_add.in_port(0))
        bias_add.in_port(1).get_connection().set_destination(new_add.in_port(1))
        bias_add.out_port(0).get_connection().set_source(new_add.out_port(0))

        if bias_add.data_format != 'NCHW':
            return

        input_shape = new_add.in_port(0).data.get_shape()
        bias_shape = new_add.in_port(1).data.get_shape()
        assert len(bias_shape) == 1

        unsqueeze_dims = np.arange(len(input_shape))
        channel_dim = get_features_dim('NCHW', len(input_shape))
        unsqueeze_dims = np.delete(unsqueeze_dims, channel_dim, 0)

        unsqueeze_node = Unsqueeze(graph, {'name': new_add.id + '/BiasUnsqueeze'}).create_node()
        unsqueeze_dims_node = Const(graph, {'name': new_add.id + '/Dims',
                                            'value': unsqueeze_dims}).create_node()
        # Reconnecting nodes
        unsqueeze_node.in_port(1).connect(unsqueeze_dims_node.out_port(0))
        unsqueeze_node['override_output_shape'] = True

        new_add.in_port(1).get_connection().insert_node(unsqueeze_node)
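The unsqueeze dims are every axis except the channel one, which gives the 1D bias the shape it needs to broadcast in NCHW. A NumPy sketch with hypothetical sizes:

import numpy as np

input_rank, channel_dim = 4, 1         # NCHW
unsqueeze_dims = np.delete(np.arange(input_rank), channel_dim)  # [0, 2, 3]
bias = np.zeros(16)
assert np.expand_dims(bias, tuple(unsqueeze_dims)).shape == (1, 16, 1, 1)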
Example 13
def reorder_inputs_for_shape_or_slice(op_node: Node, input_port: int,
                                      permute_indices_for_gather: list):
    """
    axis and slice permutations are almost the same the only difference is that for slice in general
    case permutation depends from slice_rank not from input_rank or output_rank
    """
    graph = op_node.graph
    data_node = op_node.in_node(input_port)

    gather_name = op_node.soft_get('name', op_node.id) + '/ShapeGather'
    const = Const(
        graph, {
            'value': permute_indices_for_gather,
            'name': gather_name + '/const',
            'need_shape_inference': True
        }).create_node_with_data()
    axis_const = Const(graph, {
        'value': int64_array(0),
        'name': gather_name + '/axis'
    }).create_node_with_data()
    gather = Gather(graph, {
        'name': gather_name,
        'need_shape_inference': True
    }).create_node_with_data([data_node, const, axis_const])
    attrs = graph.get_edge_data(data_node.id, op_node.id, key=0).copy()

    graph.add_edge(gather.id, op_node.id, **attrs)
    graph.remove_edge(data_node.id, op_node.id)

    # need to run manually to override output shape value to resolve shape collision for nodes with
    # 'correct_data_layout' output port attrs
    op_node['need_shape_inference'] = True
Example 14
    def find_and_replace_pattern(self, graph: Graph):
        for attr_pad in graph.get_op_nodes(op='AttributedPad'):
            # save the original node name to use it in the new Pad op instance
            original_name = attr_pad.soft_get('name', attr_pad.id)

            new_pad = Pad(graph, {
                'mode': attr_pad.soft_get('mode', None),
            }).create_node()
            rename_nodes([(attr_pad, original_name + '/to_be_removed'),
                          (new_pad, original_name)])

            attr_pad.in_port(0).get_connection().set_destination(
                new_pad.in_port(0))
            new_pad.in_port(1).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 0]
                }).create_node().out_port(0))
            new_pad.in_port(2).connect(
                Const(graph, {
                    'value': attr_pad.pads[:, 1]
                }).create_node().out_port(0))
            if attr_pad.soft_get('mode') == 'constant':
                # create Constant node of proper data type (equal to the data type of the Pad first input)
                convert_pad_value = create_op_with_const_inputs(
                    graph, ConvertLike, {0: attr_pad.fill_value},
                    {'name': original_name + '/pad_value_convert'})
                convert_pad_value.in_port(1).connect(
                    new_pad.in_port(0).get_source())
                new_pad.in_port(3).connect(convert_pad_value.out_port(0))

            attr_pad.out_port(0).get_connection().set_source(
                new_pad.out_port(0))
            graph.remove_node(attr_pad.id)
Example 15
 def replace_pattern(self, graph: Graph, match: [str, Node]):
     node = match['transpose']
     assert len(node.in_nodes()) == 1
     order = np.arange(len(node.in_port(0).data.get_shape()))[::-1]
     const = Const(graph, {'value': order, 'name': node.soft_get('name', node.id) + '/Order'}).create_node()
     node.add_input_port(1, skip_if_exist=True)
     const.out_port(0).connect(node.in_port(1))
     node['reverse_order'] = False
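The generated order is simply the reversed axis range, i.e. the same default NumPy uses for transpose. A sketch:

import numpy as np

x = np.zeros((2, 3, 4))
order = np.arange(x.ndim)[::-1]        # the Const this pass creates
assert np.transpose(x, order).shape == (4, 3, 2) == x.T.shape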
Example 16
    @staticmethod
    def transform_map_fn_output_concatenation(external_match: dict,
                                              internal_match: dict):
        """
        Transforms TensorFlow 2 output concatenation into use of axis attribute for output port of Loop node
        :param external_match: a match used for handling a part of the main graph responsible for output concatenation
        :param internal_match: a match used for handling a part of the body graph responsible for output concatenation
        """
        loop_node = external_match['while']
        stack_node = external_match['stack']
        list_reserve_node = external_match['reserve']
        body_graph = loop_node['body']

        tensor_list_set_item_node = internal_match['concatenation']
        tensor_list_set_item_node_name = tensor_list_set_item_node.soft_get(
            'name', tensor_list_set_item_node.id)
        list_result_node = internal_match['concatenation_result']

        # replace TensorListSetItem with Unsqueeze and use axis attribute for corresponding Result node
        # to concatenate results from different iterations
        unsqueeze_list_element = create_op_with_const_inputs(
            body_graph, Unsqueeze, {1: int64_array(0)},
            {'name': 'TensorListSetItemUnsqueeze'})
        tensor_list_set_item_node.in_port(2).get_connection().set_destination(
            unsqueeze_list_element.in_port(0))
        tensor_list_set_item_node.out_port(0).get_connection().set_source(
            unsqueeze_list_element.out_port(0))
        rename_nodes([(tensor_list_set_item_node,
                       tensor_list_set_item_node_name + '/AbandonedName'),
                      (unsqueeze_list_element, tensor_list_set_item_node_name)
                      ])
        list_result_node_layer_id = list_result_node.internal_layer_id
        Loop.update_port_map_value_ext(loop_node.output_port_map,
                                       'internal_layer_id',
                                       list_result_node_layer_id, 'axis', 0)

        # remove TensorListStack to by-pass the node since the result from the Loop node is already concatenated
        stack_node.out_port(0).get_connection().set_source(
            stack_node.in_port(0).get_connection().get_source())

        # disconnect ListReserve node because it is no longer needed for Loop
        list_reserve_node.out_port(0).disconnect()

        # connect the number of iterations (the trip count) taken from the second input of ListReserve.
        # Set a constant True value for the execution condition so that IE effectively ignores it and runs
        # exactly trip_count iterations; a known trip count avoids dynamism.
        loop_node.in_port(1).disconnect()
        list_reserve_node.in_port(1).get_source().connect(loop_node.in_port(1))
        for record in loop_node.output_port_map:
            if 'purpose' in record and record[
                    'purpose'] == 'execution_condition':
                exec_cond_layer_id = record['internal_layer_id']
                exec_cond_node = Loop.get_body_node_by_internal_id(
                    loop_node, exec_cond_layer_id)
                const_true = Const(body_graph, {
                    'value': np.array(True, dtype=bool)
                }).create_node()
                exec_cond_node.in_port(0).get_connection().set_source(
                    const_true.out_port(0))
Example 17
    def replace_op(self, graph: Graph, node: Node):
        weight = node.module.weight.detach().numpy()
        bias = node.module.bias.detach().numpy()

        weight = Const(graph, {'value': weight}).create_node()
        bias = Const(graph, {'value': bias}).create_node()
        matmul = MatMul(graph, dict(name=node.name)).create_node([node.in_node(0), weight])
        matmul = Add(graph, dict(name=node.name + '/bias')).create_node([matmul, bias])
        return [matmul.id]
Example 18
 def replace_sub_graph(self, graph: Graph, match: dict):
     node = match['op']
     if node.has_valid('reps'):
         tile_array = Const(
             graph,
             dict(value=int64_array(node.reps),
                  symbol_dict={'name':
                               node.id + '/tile_array'})).create_node()
         node.in_port(1).get_connection().set_source(tile_array.out_port(0))
Example 19
    @staticmethod
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        pair_node = Node(graph, node.pair_name)

        if node.t >= 0:
            raise Error('Does not support IfDefined with t >= 0')

        if node.in_port(0).get_source() is not None:
            input_port = node.in_port(0).get_source()
            op_output_id = node.out_port(0).get_destination().node.id
            out_port = pair_node.out_port(0)
            node_name = node.name
            pair_name = pair_node.name
        else:
            input_port = pair_node.in_port(0).get_source()
            op_output_id = pair_node.out_port(0).get_destination().node.id
            out_port = node.out_port(0)
            node_name = pair_node.name
            pair_name = node.name

        in_shape = input_port.data.get_shape()
        node_t = abs(node.t)

        init_value_memory_out = Const(graph, {'name': 'init_value_' + pair_name,
                                              'value': np.zeros(int64_array([in_shape[0], in_shape[1]*node_t]), dtype=np.float32),
                                              'shape': int64_array([in_shape[0], in_shape[1]*node_t])}).create_node()
        memory_out = ReadValue(graph, {'name': pair_name, 'variable_id': node_name+pair_name}).create_node()
        init_value_memory_out.out_port(0).connect(memory_out.in_port(0))

        if node_t > 1:
            crop_concat = Crop(graph, {'name': 'Memory_crop', 'dim': mo_array([in_shape[1]*(node_t-1)]),
                                       'offset': mo_array([in_shape[1]]), 'axis': mo_array([1])}).create_node()
            memory_out.out_port(0).connect(crop_concat.in_port(0))
            concat = Concat(graph, {'name': 'Memory_concat'}).create_node()
            concat.add_sequence_of_ports('in', range(2))
            crop_concat.out_port(0).connect(concat.in_port(0))
            concat.in_port(1).connect(input_port)

            memory_in = Assign(graph, {'name': node_name, 'variable_id': node_name + pair_name}).create_node()
            concat.out_port(0).connect(memory_in.in_port(0))
            out = Result(graph, {'name': 'Memory_output'}).create_node()
            memory_in.out_port(0).connect(out.in_port(0))

            crop_out = Crop(graph, {'name': 'Memory_crop_out', 'dim': mo_array([in_shape[1]]),
                                    'offset': mo_array([0]), 'axis': mo_array([1])}).create_node()
            memory_out.out_port(0).connect(crop_out.in_port(0))
            out_port.get_connection().set_source(crop_out.out_port(0))
        else:
            memory_in = Assign(graph, {'name': node_name, 'variable_id': node_name + pair_name}).create_node()
            memory_in.in_port(0).connect(input_port)
            out = Result(graph, {'name': 'Memory_output'}).create_node()
            memory_in.out_port(0).connect(out.in_port(0))
            out_port.get_connection().set_source(memory_out.out_port(0))

        graph.remove_node(op_output_id)
        graph.remove_node(node.id)
        graph.remove_node(pair_node.id)
Example 20
    @staticmethod
    def add_output_reshape(graph: Graph, match: dict):
        """
        Since MXNet Y output shape is [batch_size, seq_len, hidden_size * num_directions] we need to add reshape
        from above common format [batch_size, num_directions, seq_len, hidden_size] to MXNet format.
        """
        lstm = match['rnn_layer']
        input = match['input']
        if not lstm.has_num_directions:
            return
        old_data_node = lstm.out_node(0)
        num_directions = 2 if lstm.direction in ['bidirectional'] else 1
        mxnet_shape = lstm.out_node(0).shape.copy()

        if lstm.batch_dim == 0:
            mo_shape = shape_array([
                input.shape[lstm.batch_dim], input.shape[lstm.sequence_dim],
                lstm.hidden_size
            ])
        else:
            mo_shape = shape_array([
                input.shape[lstm.sequence_dim], input.shape[lstm.batch_dim],
                lstm.hidden_size
            ])

        if lstm.has_num_directions:
            mo_shape = shape_insert(mo_shape, 1, np.int64(num_directions))

        lstm_name = lstm.soft_get('name', lstm.id)

        new_data = Op._create_data_node(graph,
                                        name=lstm_name +
                                        '/Data/Reshape_mxnet/',
                                        attrs={'shape': mo_shape})
        graph.remove_edge(lstm.id, old_data_node.id)
        graph.add_edge(lstm.id, new_data.id, key=0, out=0)

        # Add Transpose
        permute_order = Const(
            graph, {
                'name': lstm_name + '/Transpose_mxnet_order',
                'value': int64_array([0, 2, 1, 3])
            }).create_node_with_data()
        permute_data = Transpose(graph, {
            'name': lstm_name + '/Transpose_mxnet/'
        }).create_node_with_data([new_data, permute_order])

        # Add Reshape
        reshape = Reshape(graph, {'name': lstm_name + '/Reshape_mxnet/'})
        reshape_dim_data = Const(
            graph, {
                'name': lstm_name + '/Reshape_mxnet_dim',
                'value': int64_array(unmask_shape(mxnet_shape))
            }).create_node_with_data()

        reshape.create_node_with_data([permute_data, reshape_dim_data],
                                      dict(),
                                      data_nodes=[old_data_node])
Example 21
    def replace_op(self, graph: Graph, node: Node):
        pow_2 = Const(graph, {'value': np.float32(2.0)}).create_node()
        reduce_axis = Const(graph, {'value': np.int32(-1)}).create_node()
        pow_0_5 = Const(graph, {'value': np.float32(0.5)}).create_node()

        sq = Pow(graph, dict(name=node.in_node(0).name + '/sq', power=2.0)).create_node([node.in_node(0), pow_2])
        sum = ReduceSum(graph, dict(name=sq.name + '/sum')).create_node([sq, reduce_axis])
        sqrt = Pow(graph, dict(name=sum.name + '/sqrt', power=0.5)).create_node([sum, pow_0_5])
        return [sqrt.id]
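The three constants spell out an L2 norm over the last axis; in NumPy terms the replacement computes:

import numpy as np

x = np.random.rand(3, 5).astype(np.float32)
l2 = np.power(np.sum(np.power(x, 2.0), axis=-1), 0.5)  # Pow -> ReduceSum -> Pow
assert np.allclose(l2, np.linalg.norm(x, axis=-1))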
Example 22
def create_ss_interval_border(graph: Graph, slice_border_port: Port,
                              shape: np.ndarray, axes: np.ndarray,
                              node_name: str):
    """
    This function creates "begin"/"end" parameters for the StridedSlice based on Slice's "starts"/"ends"

    :param graph: graph to operate on.
    :param slice_border_port: node output port that provides "starts"/"ends" values for the Slice.
    :param shape: input shape of the Slice
    :param axes: axes that "starts" and "ends" apply to
    :param node_name: Slice node name
    :return: Concat node that forms "begin"/"end" values for the StridedSlice
    """
    # the value for 'starts' or 'ends' might be the maximum/minimum possible int64 value. Such values must be
    # clamped to the int32 range because they do not fit into int32, which is the type the StridedSlice layer
    # supports
    clamp = create_op_with_const_inputs(graph,
                                        Clamp,
                                        port_value_dict={
                                            1: np.iinfo(np.int32).min,
                                            2: np.iinfo(np.int32).max
                                        },
                                        op_attrs=dict(name=node_name +
                                                      '/Clamp'))
    clamp.in_port(0).connect(slice_border_port)
    # we have to convert "starts"/"ends" values from the network to one data type with constant values that are created
    # here to prevent type errors in Concat node
    cast = Cast(graph, dict(name=node_name + '/CastToI64',
                            dst_type=np.int64)).create_node()
    cast.in_port(0).connect(clamp.out_port(0))
    concat = Concat(graph, dict(name=node_name + '/Concat',
                                axis=0)).create_node()
    for value_idx, port_idx in enumerate(axes):
        concat.add_input_port(port_idx)
        # "axes" may not be sorted, so we need to split "starts"/"ends" values and connect each value to the correct
        # Concat input port
        value = create_op_with_const_inputs(
            graph,
            Gather,
            port_value_dict={
                1: int64_array([value_idx]),
                2: int64_array(0)
            },
            op_attrs={'name': node_name + '/Gather'})
        cast.out_port(0).connect(value.in_port(0))
        value.out_port(0).connect(concat.in_port(port_idx))
    for port_idx in range(len(shape)):
        if not concat.is_in_port_connected(port_idx):
            concat.add_input_port(port_idx)
            # This border value would be ignored in StridedSlice because of the begin_mask\end_mask
            const = Const(
                graph, dict(name=node_name + '/Const',
                            value=int64_array([0]))).create_node()
            const.out_port(0).connect(concat.in_port(port_idx))

    return concat
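A sketch of the clamp step on its own: int64 sentinel borders such as INT64_MAX (commonly used to mean "slice to the end") are squeezed into the int32 range without changing their meaning:

import numpy as np

ends = np.array([np.iinfo(np.int64).max, 4], dtype=np.int64)
clamped = np.clip(ends, np.iinfo(np.int32).min, np.iinfo(np.int32).max)
assert clamped[0] == np.iinfo(np.int32).max and clamped[1] == 4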
Example 23
 @staticmethod
 def replace_sub_graph(graph: Graph, match: dict):
     node = match['op']
     for port_index, value_attr, attrs in node['embedded_inputs']:
         const = Const(graph, dict(value=node[value_attr])).create_node()
         node.add_input_port(port_index, skip_if_exist=True)
         const.out_port(0).connect(node.in_port(port_index))
         node.in_port(port_index).bin = attrs['bin']
         node.in_port(port_index).in_attrs.append('bin')
         del node[value_attr]
     del node['embedded_inputs']
Example 24
    @classmethod
    def extract(cls, node):
        pb_value = onnx_attr(node, 'value', 't')
        value = numpy_helper.to_array(pb_value)

        attrs = {
            'data_type': value.dtype,
            'value': value,
        }
        Const.update_node_stat(node, attrs)
        return cls.enabled
Example 25
def input_as_const(node: Node, attrs: dict, port: int, bin: str, value: np.ndarray):
    """
    Inserts constant node on input `port` of `node` with `values` and `attrs`. Marks input edge with bin `attribute`
    """
    graph = node.graph
    const = Const(graph, {'value': value, **attrs}).create_node()
    node.add_input_port(port, skip_if_exist=True)
    const.out_port(0).connect(node.in_port(port))
    node.in_port(port).bin = bin
    node.in_port(port).in_attrs.append('bin')
Example 26
 @classmethod
 def extract(cls, node):
     pb_tensor = node.pb.attr["value"].tensor
     shape = tf_tensor_shape(pb_tensor.tensor_shape)
     attrs = {
         'shape': shape,
         'value': tf_tensor_content(pb_tensor.dtype, shape, pb_tensor),
         'data_type': tf_dtype_extractor(pb_tensor.dtype),
     }
     Const.update_node_stat(node, attrs)
     return cls.enabled
Example 27
    def replace_op(self, graph: Graph, node: Node):
        mode = node.module.mode
        if mode.endswith('linear'):  # like bilinear or trilinear
            mode = 'linear'
        align_corners = node.module.align_corners

        if mode == 'linear':
            height = node.module.size[0] if node.module.size is not None else -1
            width = node.module.size[1] if node.module.size is not None else -1
            dims = node.module.dims
            axes = np.arange(2, dims)
            pads = np.zeros(dims, dtype=np.int32)
            scales = np.repeat(node.module.scale_factor, dims - 2).astype(np.float32)
            attrs = {
                'name': node.name,
                'version': 'opset4',
                'height': height,
                'width': width,
                'mode': mode,
                'axes': axes,
                'pads_begin': pads,
                'pads_end': pads,
                'coordinate_transformation_mode': 'align_corners' if align_corners else 'half_pixel',
                'shape_calculation_mode': 'sizes' if node.module.size is not None else 'scales',
            }

            sizes = Const(graph, {'value': np.array([height, width])}).create_node()
            axes = Const(graph, {'value': axes}).create_node()
            scales = Const(graph, {'value': scales}).create_node()
            interp = Interpolate(graph, attrs).create_node([node.in_node(0), sizes, scales, axes])
        else:
            if node.module.size:
                attrs = {
                    'name': node.name,
                    'version': 'opset1',
                    'height': node.module.size[0],
                    'width': node.module.size[1],
                    'mode': mode,
                    'axes': [2, 3],
                    'align_corners': node.module.align_corners,
                }
                interp = Interpolate(graph, attrs).create_node([node.in_node(0)])
            else:
                if not node.module.scale_factor:
                    raise Error('No scale_factor found')
                attrs = {
                    'name': node.name,
                    'height_scale': float(node.module.scale_factor),
                    'width_scale': float(node.module.scale_factor),
                    'mode': mode,
                    'align_corners': node.module.align_corners,
                }
                interp = UpsampleOp(graph, attrs).create_node([node.in_node(0)])

        return [interp.id]
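For the linear branch, the derived Interpolate inputs for a hypothetical 4D bilinear case (dims=4, scale_factor=2.0) work out as follows:

import numpy as np

dims, scale_factor = 4, 2.0
axes = np.arange(2, dims)                                      # spatial axes: [2, 3]
scales = np.repeat(scale_factor, dims - 2).astype(np.float32)  # [2.0, 2.0]
assert list(axes) == [2, 3] and list(scales) == [2.0, 2.0]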
Example 28
    def replace_sub_graph(self, graph: Graph, match: dict):
        node = match['op']
        if not node.has_valid('start') or not node.has_valid(
                'stop') or not node.has_valid('step'):
            return

        start_value = Const(
            graph,
            dict(value=node.start,
                 symbol_dict={'name':
                              node.id + '/const_start'})).create_node()
        limit_value = Const(
            graph,
            dict(value=node.stop,
                 symbol_dict={'name':
                              node.id + '/const_limit'})).create_node()
        delta_value = Const(
            graph,
            dict(value=node.step,
                 symbol_dict={'name':
                              node.id + '/const_delta'})).create_node()
        node.in_port(0).get_connection().set_source(start_value.out_port(0))
        node.in_port(1).get_connection().set_source(limit_value.out_port(0))
        node.in_port(2).get_connection().set_source(delta_value.out_port(0))
        if node.has_valid('repeat') and node.repeat > 1:
            rep = MXRepeat(
                graph,
                dict(name=node.id + '/mxrepeat', axis=0,
                     repeats=node.repeat)).create_node()
            node.out_port(0).get_destination().get_connection().set_source(
                rep.out_port(0))
            rep.in_port(0).connect(node.out_port(0))
Example 29
 def replace_sub_graph(self, graph: Graph, match: [dict, SubgraphMatch]):
     node = match['reshape']
     connected_in_ports = [port for port in node.in_ports().values() if not port.disconnected()]
     if len(connected_in_ports) == 1:
         if node.has('dim'):
             const = Const(graph, {'value': node.dim}).create_node()
             node.add_input_port(1, skip_if_exist=True)
             const.out_port(0).connect(node.in_port(1))
             del node['dim']
         else:
             raise Error('The `dim` attribute for node {} is not set'.format(node.op))
Example 30
 def replace_op(self, graph: Graph, node: Node):
     const = Const(
         graph,
         dict(value=mo_array(-1.),
              name=node.name + '/reciprocal_pow_const_')).create_node()
     reciprocal = Pow(graph, {
         'name': node.name + '/reciprocal_pow_'
     }).create_node()
     node.in_port(0).get_connection().set_destination(reciprocal.in_port(0))
     const.out_port(0).connect(reciprocal.in_port(1))
     return [reciprocal.id]