def replace_sub_graph(self, graph: Graph, match: dict):
        """
        Need to find the pattern: SoftmaxActivation -> DetectionOutput
        DetectionOutput in IE expects flattened input from SoftMax, that is why there is the need to add
        Flatten layer

        Parameters
        ----------
        graph : Graph
           Graph with loaded model.
         match : dict
           Patterns which were found in graph structure.
        """
        softmax_activation = match['softmax_activation']
        multi_box_detection = match['multi_box_detection']
        softmax_activation['axis'] = -1
        edge_data = graph.get_edge_data(softmax_activation.id,
                                        multi_box_detection.id)
        out_port = edge_data[0]['out']
        in_port = edge_data[0]['in']
        graph.remove_edge(softmax_activation.id, multi_box_detection.id)
        new_reshape_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([0, -1]),
            dict(op='Reshape', name=multi_box_detection.name + '/Reshape_'),
            softmax_activation)
        graph.create_edge(new_reshape_node,
                          multi_box_detection,
                          in_port=in_port,
                          out_port=out_port)
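
For reference, a minimal numpy sketch of the Reshape([0, -1]) semantics used above: a dim value of 0 copies the corresponding input dimension and -1 infers the rest, so the tensor is flattened per batch element.

import numpy as np

# numpy equivalent of the IE Reshape with pattern [0, -1]
x = np.zeros((2, 4, 3, 3))             # e.g. [N, C, H, W]
flattened = x.reshape(x.shape[0], -1)  # keep dim 0, infer the rest
print(flattened.shape)                 # (2, 36)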
Example 2
    def add_input_data_to_prior_boxes(graph: Graph, input_names: str = ''):
        """
        PriorBox layer has data input unlike mxnet.
        Need to add data input to _contrib_MultiBoxPrior for
        for correct conversion to PriorBox layer.

        Parameters
        ----------
        graph : Graph
           Graph with loaded model.
        """
        if not input_names:
            input_names = ('data', )
        else:
            input_names = input_names.split(',')

        input_nodes = {}
        for node in graph.nodes():
            node = Node(graph, node)
            if node.has_valid('op') and node.name in input_names:
                input_nodes.update({node.id: node})

        if len(input_nodes) > 0:
            for node in graph.nodes():
                node = Node(graph, node)
                if node.has_valid('op') and node.op == '_contrib_MultiBoxPrior':
                    # connect the first found input node as the data input
                    node.add_input_port(idx=1)
                    graph.create_edge(list(input_nodes.values())[0],
                                      node,
                                      out_port=0,
                                      in_port=1)
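
A small runnable sketch of how the input_names argument is parsed; the name 'im_info' below is purely illustrative, and the comma-separated format presumably mirrors the command-line input list.

# mirrors the parsing logic at the top of add_input_data_to_prior_boxes
def parse_input_names(input_names: str = ''):
    return input_names.split(',') if input_names else ('data',)

print(parse_input_names())                # ('data',)
print(parse_input_names('data,im_info'))  # ['data', 'im_info']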
Example 3
    def replace_sub_graph(graph: Graph, match: dict, **kwargs):
        # Replace each output of the QueueDequeue node with a Parameter
        # (network input) whose shape comes from the FIFOQueue 'shapes'
        # attribute, then drop both queue nodes.
        inputs_dict = {}
        for u, v, edge_attrs in graph.out_edges(match['queue_deque'].id,
                                                data=True):
            out_port = edge_attrs['out']
            shape = match['fifo_queue'].shapes[out_port]
            if out_port not in inputs_dict:
                input_op = Parameter(graph, {'shape': shape.copy()})
                inputs_dict[out_port] = input_op.create_node([])
            graph.create_edge(inputs_dict[out_port], Node(graph, v),
                              edge_attrs['out'], edge_attrs['in'], edge_attrs)

        graph.remove_node(match['queue_deque'].id)
        graph.remove_node(match['fifo_queue'].id)
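
The pattern this replacer registers is not shown on this page; a plausible sketch follows. 'FIFOQueueV2' appears elsewhere in these examples, but the 'QueueDequeueUpToV2' op name is an assumption.

def pattern():
    """A plausible sketch of the pattern behind this match dict; the
    QueueDequeue op name is an assumption, not taken from this page."""
    return dict(
        nodes=[
            ('fifo_queue', dict(op='FIFOQueueV2')),
            ('queue_deque', dict(op='QueueDequeueUpToV2')),
        ],
        edges=[
            ('fifo_queue', 'queue_deque'),
        ])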
Example 4
    def replace_sub_graph(graph: Graph, match: dict, **kwargs):
        r"""
        Usually graph looks like:

          main_graph
            ...             Result
             |                 |
        image_batch      label_batch
                \        /
                batch_join
                /        \
        placeholder      fifo_queue

        The replacer works for both cases (hence the loop over the batch_join
        outputs below):
            label_batch was marked as output
            there is no label_batch node
        """
        true_placeholder_shape = match['placeholder'].shape
        placeholder_shape = match['fifo_queue'].shapes[0]
        placeholder_data_type = match['fifo_queue'].types[0]
        # in case of an out-of-the-box (OOB) conversion the batch_size placeholder
        # shape is not required, so use the shape specified in the FIFOQueueV2
        # 'shapes' list attribute
        assert true_placeholder_shape is None or true_placeholder_shape.ndim <= 1
        if true_placeholder_shape is not None and true_placeholder_shape.ndim == 1 \
                and len(true_placeholder_shape) > 1:
            log.warning(
                'Placeholder \'{}\' got non 0-dimensional shape {} in FIFOQueue pattern. Placeholder will have the '
                'same shape after folding the pattern instead of {} shape which is original for the network.'
                ''.format(match['placeholder'].id, true_placeholder_shape,
                          placeholder_shape))
            placeholder_shape = true_placeholder_shape
        placeholder_name = match['fifo_queue'].name
        graph.erase_node(match['fifo_queue'])
        graph.erase_node(match['placeholder'])
        for _, out in match['batch_join'].out_nodes().items():
            if out.id != match['image_batch'].id:
                if out.out_node().op == 'Result':
                    graph.remove_node(out.out_node().id)
                graph.remove_node(out.id)
        graph.remove_node(match['batch_join'].id)
        placeholder = Parameter(
            graph, {
                'name': placeholder_name,
                'shape': placeholder_shape,
                'data_type': placeholder_data_type
            }).create_node()
        graph.create_edge(placeholder, match['image_batch'])
        log.info(
            "FIFOQueueV2 pattern was detected. New shape of placeholder {} is {}. Use -b to set batch size if "
            "needed".format(placeholder.id, placeholder['shape']))
    def replace_op(self, graph: Graph, node: Node):
        if node.has_and_set('inputs_preprocessed'):
            log.debug('Node "{}" has already been preprocessed'.format(
                node.soft_get('name')))
            return []
        # reshape tensor with batch indices to 2d
        unsqueeze_node = create_op_node_with_second_input(
            graph, Unsqueeze, int64_array([1]),
            {'name': node.name + '/Unsqueeze'}, node.in_node(2))

        convert_node = Cast(
            graph, {
                'name': unsqueeze_node.name + '/ToFloat',
                'dst_type': data_type_str_to_np(graph.graph['cmd_params'].data_type)
            }).create_node()

        convert_node.in_port(0).connect(unsqueeze_node.out_port(0))

        concat_op = Concat(
            graph, {
                'axis': 1,
                'name': node.name + '/concat_batch_indices_and_boxes',
                'in_ports_count': 2
            })
        concat_node = concat_op.create_node([convert_node, node.in_node(1)])

        # do not remove edge with crop_size because it is needed in the partial infer
        graph.remove_edge(node.in_node(1).id, node.id)

        # input to the CropAndResize contains boxes coordinates in YXYX layout. But IE layer ROIPooling expects
        # coordinates in the XYXY layout, so convolution is added here to swap coordinates
        swapped_box_coordinates_node = add_convolution_to_swap_xy_coordinates(
            graph, concat_node, 5)

        # reshape locations tensor to 2D so it could be passed to Eltwise which will be converted to ScaleShift
        reshape_2d_node = create_op_node_with_second_input(
            graph, Reshape, int64_array([-1, 5]),
            dict(name=swapped_box_coordinates_node.id + '/reshape_2d_'),
            swapped_box_coordinates_node)
        graph.create_edge(reshape_2d_node, node, 0, 1)

        # do not replace any output edge
        return []
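
A numpy illustration of what add_convolution_to_swap_xy_coordinates achieves, assuming 5-element rows [batch_idx, y1, x1, y2, x2] as produced by the Concat above: a fixed permutation swaps each (y, x) pair.

import numpy as np

# permutation mapping input index src to output index dst:
# [batch, y1, x1, y2, x2] -> [batch, x1, y1, x2, y2]
perm = np.zeros((5, 5))
for src, dst in enumerate([0, 2, 1, 4, 3]):
    perm[src, dst] = 1.0

rows = np.array([[0., 10., 20., 30., 40.]])  # [batch, y1, x1, y2, x2]
print(rows @ perm)                           # [[ 0. 20. 10. 40. 30.]]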
    def replace_sub_graph(self, graph: Graph, match: dict):
        """
        Need to find each occurrence of pattern:
        transpose -> SoftmaxActivation -> _contrib_MultiBoxDetection
        remove transpose layer to secure the order of weights in SoftMax to be the same as IE expects
        IE expects weights to be in following order: class-wise values for each priorbox.
        priorboxes change the quickest

        Parameters
        ----------
        graph : Graph
           Graph with loaded model.
         match : dict
           Patterns which were found in graph structure.
        """
        transpose_node = match['transpose']
        softmax_activation = match['softmax_activation']
        transpose_in_node = transpose_node.in_node(0)

        graph.remove_edge(transpose_in_node.id, transpose_node.id)
        graph.remove_edge(transpose_node.id, softmax_activation.id)
        graph.remove_node(transpose_node.id)
        graph.create_edge(transpose_in_node, softmax_activation)
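
Again the pattern() of this replacer is not shown; a minimal sketch of the three-node chain it matches would be as follows (the 'transpose' op name is an assumption, the other two come from the docstring).

def pattern():
    """Sketch of the matched chain: transpose -> softmax -> detection."""
    return dict(
        nodes=[
            ('transpose', dict(op='transpose')),
            ('softmax_activation', dict(op='SoftmaxActivation')),
            ('multi_box_detection', dict(op='_contrib_MultiBoxDetection')),
        ],
        edges=[
            ('transpose', 'softmax_activation'),
            ('softmax_activation', 'multi_box_detection'),
        ])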
    def replace_pattern(graph: Graph, match: dict):
        """
        DetectionOutput layer has another order of inputs unlike mxnet.
        Need to reorder _contrib_MultiBoxDetection inputs
        for correct conversion to DetectionOutput layer.

        Parameters
        ----------
        graph : Graph
           Graph with loaded model.
        """
        multi_box_detection_node = match['multi_box_detection']
        conf_node = multi_box_detection_node.in_node(0)
        loc_node = multi_box_detection_node.in_node(1)

        conf_edge_data = graph.get_edge_data(conf_node.id,
                                             multi_box_detection_node.id)
        conf_out_port = conf_edge_data[0]['out']
        conf_in_port = conf_edge_data[0]['in']

        loc_edge_data = graph.get_edge_data(loc_node.id,
                                            multi_box_detection_node.id)
        loc_out_port = loc_edge_data[0]['out']
        loc_in_port = loc_edge_data[0]['in']

        graph.remove_edge(conf_node.id, multi_box_detection_node.id)
        graph.remove_edge(loc_node.id, multi_box_detection_node.id)

        graph.create_edge(loc_node,
                          multi_box_detection_node,
                          in_port=conf_in_port,
                          out_port=conf_out_port)
        graph.create_edge(conf_node,
                          multi_box_detection_node,
                          in_port=loc_in_port,
                          out_port=loc_out_port)
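
A toy illustration of the effect: assuming the usual MXNet input order (confidences on port 0, locations on port 1) and the IE DetectionOutput order (locations first), the two edges simply trade in_ports.

ports_before = {0: 'conf', 1: 'loc'}  # MXNet _contrib_MultiBoxDetection order
ports_after = {0: ports_before[1], 1: ports_before[0]}  # DetectionOutput order
print(ports_after)  # {0: 'loc', 1: 'conf'}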
Example 8
def load_parallel_component(file_descr, graph: Graph, prev_layer_id):
    """
    Load ParallelComponent of the Kaldi model.
    ParallelComponent contains parallel nested networks.
    VariadicSplit is inserted before nested networks.
    Outputs of nested networks concatenate with layer Concat.

    :param file_descr: descriptor of the model file
    :param graph: graph with the topology.
    :param prev_layer_id: id of the input layers for parallel component layer
    :return: id of the concat layer - last layer of the parallel component layers
    """
    nnet_count = read_token_value(file_descr, b'<NestedNnetCount>')
    log.debug(
        'Model contains parallel component with {} nested networks'.format(
            nnet_count))

    split_points = []
    outputs = []
    inputs = []

    for i in range(nnet_count):
        read_token_value(file_descr, b'<NestedNnet>')
        collect_until_token(file_descr, b'<Nnet>')
        g = Graph()
        load_kalid_nnet1_model(g, file_descr, 'Nested_net_{}'.format(i))

        # the input to nnet1 models is of rank 1, but we also insert batch_size
        # into the 0th axis; the 1st axis contains the input_size of the nested
        # subnetwork. Split the input from the main network across the subnetworks.
        input_node = Node(g, 'Parameter')
        split_points.append(input_node['shape'][1])
        g.remove_node(input_node.id)

        mapping = {
            node: graph.unique_id(node)
            for node in g.nodes(data=False) if node in graph
        }
        g = nx.relabel_nodes(g, mapping)
        for val in mapping.values():
            g.node[val]['name'] = val
        graph.add_nodes_from(g.nodes(data=True))
        graph.add_edges_from(g.edges(data=True))
        sorted_nodes = tuple(nx.topological_sort(g))

        outputs.append(Node(graph, sorted_nodes[-1]))
        inputs.append(Node(graph, sorted_nodes[0]))

    split_id = graph.unique_id(prefix='NestedNets/VariadicSplit')
    attrs = {
        'out_ports_count': nnet_count,
        'size_splits': split_points,
        'axis': 1,
        'name': split_id
    }
    variadic_split_node = AttributedVariadicSplit(graph, attrs).create_node()
    prev_layer_node = Node(graph, prev_layer_id)
    prev_layer_node.add_output_port(0)
    graph.create_edge(
        prev_layer_node, variadic_split_node, 0, 0,
        create_edge_attrs(prev_layer_id, variadic_split_node.id,
                          prev_layer_id))

    concat_id = graph.unique_id(prefix='Concat')
    graph.add_node(concat_id, parameters=None, op='concat', kind='op')
    concat_node = Node(graph, concat_id)

    # Connect each output of variadic_split_node to each subnetwork's inputs in ParallelComponent
    # and each subnetwork's output to concat_node
    for i, (input_node, output_node) in enumerate(zip(inputs, outputs)):
        output_node.add_output_port(0)
        concat_node.add_input_port(i)
        graph.create_edge(
            output_node, concat_node, 0, i,
            create_edge_attrs(output_node.id, concat_id, output_node.id, i, 0))
        graph.create_edge(
            variadic_split_node, input_node, i, 0,
            create_edge_attrs(variadic_split_node.id, input_node.id,
                              variadic_split_node.id, 0, i))
    return concat_id
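
A numpy sketch of the VariadicSplit data flow built above: the parallel component's input is split along axis 1 into chunks whose widths are the nested networks' input sizes (split_points), and the nested outputs are later concatenated back along the same axis.

import numpy as np

x = np.zeros((8, 10))  # batch of 8, feature size 10
split_points = [4, 6]  # e.g. two nested nets with input sizes 4 and 6
chunks = np.split(x, np.cumsum(split_points)[:-1], axis=1)
print([c.shape for c in chunks])  # [(8, 4), (8, 6)]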