def extract(cls, node):
    shape = shape_array([])
    # Extract the output shape from the `shape` attribute
    extracted_shape = tf_tensor_shape(node.pb.attr["shape"].shape)
    if len(extracted_shape) != 0:
        shape = extracted_shape
    else:
        # Fall back to the `_output_shapes` attribute if it is present
        extracted_output_shapes = node.pb.attr["_output_shapes"].list.shape
        if len(extracted_output_shapes) == 1:  # check that the attribute is not empty
            extracted_output_shapes = tf_tensor_shape(extracted_output_shapes[0])

            # Check the extracted shapes for consistency. There are known cases when a Placeholder
            # operation has an empty `shape` attribute but a non-empty `_output_shapes` attribute,
            # and this case needs to be handled and supported.
            if len(extracted_output_shapes) > len(extracted_shape):
                log.warning('Extracted shapes for Placeholder operation {} have different lengths: `shape` {} and '
                            '`_output_shapes` {}. Please check that the model is consistent'.format(
                    node.pb.name, extracted_shape, extracted_output_shapes))
                if len(extracted_output_shapes) != 0:
                    shape = extracted_output_shapes

    attrs = {
        'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
        'shape': shape,
        'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
    }
    if node.pb.attr["shape"].shape.unknown_rank:
        attrs['shape'] = None
    Parameter.update_node_stat(node, attrs)
    return cls.enabled
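For reference, a minimal sketch of the two shape sources this extractor consults, assuming TensorFlow's protobuf bindings are available; the dims() helper below is a stand-in for a tf_tensor_shape-style conversion, not the Model Optimizer implementation:

from tensorflow.core.framework import tensor_shape_pb2

def dims(shape_proto):
    # List the size of every dimension; -1 marks a dynamic dimension in TF shape protos
    return [d.size for d in shape_proto.dim]

proto = tensor_shape_pb2.TensorShapeProto()  # an empty `shape` attribute
assert dims(proto) == []                     # -> the extractor falls back to `_output_shapes`

proto.dim.add(size=-1)                       # dynamic batch dimension
proto.dim.add(size=224)
print(dims(proto))                           # [-1, 224]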
Example #2
def extract(cls, node):
    attrs = {
        'shape': node.shape,
        'data_type': np.float32,  # TODO: other types?
    }
    Parameter.update_node_stat(node, attrs)
    return cls.enabled
def insert_do(graph: Graph, replacement_descriptions: dict):
    do_outputs = replacement_descriptions['do_outputs']
    prior_boxes_node = Node(graph, 'ROIFeatureExtractor_2')
    num_classes = 81
    box_regressions_input_node = Node(
        graph, replacement_descriptions['box_regressions_input_node'])
    box_regressions_node = create_op_node_with_second_input(
        graph, Reshape, int64_array([-1, 4 * num_classes]),
        dict(name='box_regressions'), box_regressions_input_node)

    class_predicitons_node = Node(
        graph, replacement_descriptions['class_predicitons_node'])
    im_info_node = Parameter(graph, {
        'name': 'im_info',
        'shape': int64_array([1, 3])
    }).create_node()

    do_node = ExperimentalDetectronDetectionOutput(graph, {
        'name': 'DetectionOutput',
        'class_agnostic_box_regression': 0,
        'deltas_weights': np.array([10.0, 10.0, 5.0, 5.0]),
        'max_delta_log_wh': replacement_descriptions['max_delta_log_wh'],
        'nms_threshold': replacement_descriptions['nms_threshold'],
        'score_threshold': replacement_descriptions['score_threshold'],
        'num_classes': num_classes,
        'max_detections_per_image': replacement_descriptions['max_detections_per_image'],
        'post_nms_count': replacement_descriptions['post_nms_count']
    }).create_node()
    prior_boxes_node.out_port(1).connect(do_node.in_port(0))
    box_regressions_node.out_port(0).connect(do_node.in_port(1))
    class_predicitons_node.out_port(0).connect(do_node.in_port(2))
    im_info_node.out_port(0).connect(do_node.in_port(3))

    do_output_ports = [
        do_node.out_port(0),
        do_node.out_port(1),
        do_node.out_port(2)
    ]
    old_do_output_nodes = [Node(graph, node_id) for node_id in do_outputs]
    for old_node, new_port in zip(old_do_output_nodes, do_output_ports):
        old_node.out_port(0).get_connection().set_source(new_port)
    # The consumer of the second output port of the ExperimentalDetectronDetectionOutput is a Mul node
    # whose second input is of type int64, so a Cast has to be inserted to make the data types match.
    do_node.out_port(1).get_connection().insert_node(Cast(graph, {'dst_type': np.int64}).create_node())
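A hypothetical call site for insert_do; the dictionary keys match the ones the function reads above, while every value is a placeholder chosen for illustration:

replacement_descriptions = {
    'do_outputs': ['do_out_0', 'do_out_1', 'do_out_2'],  # placeholder ids of the old output nodes
    'box_regressions_input_node': 'bbox_pred',           # placeholder node name
    'class_predicitons_node': 'cls_score',               # placeholder node name (key spelled as in the source)
    'max_delta_log_wh': 4.135,
    'nms_threshold': 0.5,
    'score_threshold': 0.05,
    'max_detections_per_image': 100,
    'post_nms_count': 2000,
}
insert_do(graph, replacement_descriptions)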
Example #4
def create_parameter_with_empty_attrs(graph, param_name):
    graph.add_node(param_name, kind='op', op='Parameter', name=param_name, pb=None, shape=None)
    parameter_node = Node(graph, param_name)
    # Manually update the necessary attrs for the node: the extractor will not be
    # called for it because the node has no .pb attribute.
    Parameter.update_node_stat(parameter_node, {})
    parameter_node['internal_layer_id'] = len(graph.nodes)

    return parameter_node
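A hypothetical usage sketch: the helper returns a live Parameter node whose attributes can then be filled in by hand, since no extractor runs for it:

param = create_parameter_with_empty_attrs(graph, 'extra_input')  # 'extra_input' is illustrative
param['shape'] = int64_array([1, 3, 224, 224])                   # shapes have to be set manually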
Example #5
def extract(cls, node):
    t_type = node.pb.type.tensor_type
    attrs = {
        'shape': shape_array([d.dim_value if (not hasattr(d, 'dim_param') or d.dim_param == '') and d.dim_value != 0
                              else dynamic_dimension_value for d in t_type.shape.dim]),
        'data_type': TENSOR_TYPE_TO_NP_TYPE[t_type.elem_type]
    }
    Parameter.update_node_stat(node, attrs)
    return cls.enabled
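The extractor above mirrors the ONNX convention that a dimension is dynamic when dim_param holds a symbolic name or dim_value is 0. A minimal sketch using the onnx package (the tensor name and shape are illustrative):

from onnx import TensorProto, helper

vi = helper.make_tensor_value_info('x', TensorProto.FLOAT, ['batch', 3, 224, 224])
for d in vi.type.tensor_type.shape.dim:
    # prints 'batch' for the symbolic dimension, then 3, 224, 224
    print(d.dim_param or d.dim_value)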
Example #6
def extract(cls, node):
    attrs = {
        'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
        'shape': tf_tensor_shape(node.pb.attr["shape"].shape),
        'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
    }
    if node.pb.attr["shape"].shape.unknown_rank:
        attrs['shape'] = None
    Parameter.update_node_stat(node, attrs)
    return cls.enabled
    def replace_sub_graph(graph: Graph, match: dict, **kwargs):
        inputs_dict = {}
        for u, v, edge_attrs in graph.out_edges(match['queue_deque'].id,
                                                data=True):
            out_port = edge_attrs['out']
            shape = match['fifo_queue'].shapes[out_port]
            if out_port not in inputs_dict:
                input_op = Parameter(graph, {'shape': shape.copy()})
                inputs_dict[out_port] = input_op.create_node([])
            graph.create_edge(inputs_dict[out_port], Node(graph, v),
                              edge_attrs['out'], edge_attrs['in'], edge_attrs)

        graph.remove_node(match['queue_deque'].id)
        graph.remove_node(match['fifo_queue'].id)
Example #8
    def replace_pattern(graph: Graph, match: dict):
        node = match['op']
        node_id = node['variable_id']

        i = 0
        node.in_port(0).disconnect()
        for dest in node.out_port(0).get_destinations():
            new_in = Parameter(graph, {'name': "Parameter_"+str(i)+"_for_"+node_id,
                                       'shape': dest.data.get_shape()}).create_node()
            i += 1
            dest.disconnect()
            new_in.out_port(0).connect(dest)
            log.error("Add input/output mapped {} -> {} ".format(new_in.name, "Result_for_"+node_id),
                      extra={'is_warning': True})
Example #9
    def replace_sub_graph(graph: Graph, match: dict, **kwargs):
        r"""
        Usually the graph looks like:

          main_graph
            ...             Result
             |                 |
        image_batch      label_batch
                \        /
                batch_join
                /        \
        placeholder      fifo_queue

        The replacer works for both cases (hence the loop over the batch_join outputs below):
            label_batch was marked as output
            there is no label_batch node
        """
        true_placeholder_shape = match['placeholder'].shape
        placeholder_shape = match['fifo_queue'].shapes[0]
        placeholder_data_type = match['fifo_queue'].types[0]
        # in case of OOB conversion the batch_size placeholder shape is not required,
        # so use the shape specified in the FIFOQueueV2 `shapes` list attribute
        assert true_placeholder_shape is None or true_placeholder_shape.ndim <= 1
        if true_placeholder_shape is not None and true_placeholder_shape.ndim == 1 and len(
                true_placeholder_shape) > 1:
            log.warning(
                'Placeholder \'{}\' got a non 0-dimensional shape {} in the FIFOQueue pattern. After folding the '
                'pattern the placeholder will keep this shape instead of the network\'s original shape {}.'
                ''.format(match['placeholder'].id, true_placeholder_shape,
                          placeholder_shape))
            placeholder_shape = true_placeholder_shape
        placeholder_name = match['fifo_queue'].name
        graph.erase_node(match['fifo_queue'])
        graph.erase_node(match['placeholder'])
        for _, out in match['batch_join'].out_nodes().items():
            if out.id != match['image_batch'].id:
                if out.out_node().op == 'Result':
                    graph.remove_node(out.out_node().id)
                graph.remove_node(out.id)
        graph.remove_node(match['batch_join'].id)
        placeholder = Parameter(graph, {
            'name': placeholder_name,
            'shape': placeholder_shape,
            'data_type': placeholder_data_type
        }).create_node()
        graph.create_edge(placeholder, match['image_batch'])
        log.info(
            "FIFOQueueV2 pattern was detected. New shape of placeholder {} is {}. Use -b to set batch size if "
            "needed".format(placeholder.id, placeholder['shape']))
Example #10
def convert_graph_inputs_to_parameters(internal_graph, internal_graph_proto):
    # create Parameter nodes for the body graph
    body_parameters = []
    body_parameter_names = []
    for idx, pb_node in enumerate(internal_graph_proto['input_arg']):
        param_id = internal_graph.unique_id(pb_node.name)
        internal_graph.add_node(param_id, name=param_id, kind='op', op='Parameter', pb=None, shape=None)
        parameter_node = Node(internal_graph, pb_node.name)
        Parameter.update_node_stat(parameter_node, {
            'data_type': tf_dtype_extractor(pb_node.type),
            'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        })
        body_parameters.append(parameter_node)
        body_parameter_names.append(param_id)
    return body_parameters, body_parameter_names
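A hypothetical call site: for TensorFlow the input_arg entries would typically come from a FunctionDef signature, whose name/type pairs are exactly what the loop above consumes (body_graph and func_def are placeholders):

body_parameters, body_parameter_names = convert_graph_inputs_to_parameters(
    body_graph, {'input_arg': func_def.signature.input_arg})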
Example #11
def find_and_replace_pattern(self, graph: Graph):
    dynamic_inputs = {}
    for parameter in graph.get_op_nodes(op='Parameter'):
        param_shape = parameter.soft_get('shape', shape_array(dynamic_dimension_value))
        if not is_fully_defined(param_shape):
            parameter_name = parameter.soft_get('name', parameter.id)
            dynamic_inputs[parameter_name] = parameter
    if dynamic_inputs:
        log.error('The model contains input(s) with partially defined shapes: {}. '
                  'Starting from the 2022.1 release the Model Optimizer can generate an IR with partially defined '
                  'input shapes ("-1" dimension in the TensorFlow model or a dimension with a string value in the '
                  'ONNX model). Some OpenVINO plugins require model input shapes to be static, so you should call '
                  'the "reshape" method in the Inference Engine and specify static input shapes. For optimal '
                  'performance, it is still recommended to replace the input shapes with fixed ones using the '
                  '"--input" or "--input_shape" command-line parameters.'
                  .format(','.join('name="{}" shape="{}"'.format(name, Parameter.shape_serialize(parameter))
                                   for name, parameter in dynamic_inputs.items())),
                  extra={'is_warning': True})
    partial_infer(graph)
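A minimal sketch, an assumption about the semantics rather than the Model Optimizer implementation, of what an is_fully_defined-style check amounts to: a shape is fully defined only when every dimension is a concrete non-negative integer:

import numpy as np

def is_fully_defined_sketch(shape) -> bool:
    # None stands for unknown rank; a negative entry marks a dynamic dimension
    return shape is not None and all(int(d) >= 0 for d in np.atleast_1d(shape))

print(is_fully_defined_sketch([1, 3, 224, 224]))   # True
print(is_fully_defined_sketch([-1, 3, 224, 224]))  # False: dynamic batch dimension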
Example #12
    def cover_body_input_data_nodes_with_parameter_ops(ti: Node):
        body = ti.body

        op_port_map = []
        for record in ti.input_port_map:
            operation_node = get_internal_node_by_layer_id(ti, record['internal_layer_id'])
            real_in_port = TensorIterator.special_port_to_real_port(operation_node, copy(record['internal_port_id']))
            op_port_map.append((operation_node, real_in_port))

        for operation_node, in_port in op_port_map:
            data_node = operation_node.in_node(in_port)

            attrs = deepcopy(body.get_edge_data(data_node.id, operation_node.id)[0])
            body.remove_edge(data_node.id, operation_node.id)

            assert data_node.has_valid('shape'), \
                'Data node should have the `shape` attribute set, but it is missing for node {}'.format(data_node.id)
            shape = data_node['shape'].copy()
            parameter_data_node = Parameter(body, {'shape': shape_array(shape)}).create_node_with_data()

            body.create_edge(src_node=parameter_data_node, dst_node=operation_node,
                             out_port=0, in_port=in_port, edge_attrs=attrs)
            del body.get_edge_data(parameter_data_node.id, operation_node.id)[0]['out']
Example #13
def extract(cls, node):
    if 'value' in node.symbol_dict:
        Const.update_node_stat(node, {'value': node.symbol_dict['value']})
    else:
        Parameter.update_node_stat(node, {})
    return cls.enabled
Example #14
def extract(cls, node):
    Parameter.update_node_stat(node)
    return cls.enabled
Example #15
def extract(cls, node):
    Parameter.update_node_stat(node, {'shape': dim_to_shape(node.pb.input_param.shape[0].dim)})
    return cls.enabled
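Presumably (an assumption about the helper, not its actual source) dim_to_shape only needs to copy Caffe's repeated dim field into an integer array; a sketch:

import numpy as np

def dim_to_shape_sketch(dim):
    # Caffe's BlobShape stores dimensions as a repeated int64 field
    return np.array(dim, dtype=np.int64)

print(dim_to_shape_sketch([1, 3, 227, 227]))  # [  1   3 227 227]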