Example 1
def protobuf2nx(graph, pb: tf_v1.GraphDef):
    fill_graph_with_nodes(graph, pb.node, get_id=lambda pb: pb.name, get_attrs=protobuf_attrs)

    # Collect per-op-type statistics for nodes that carry a protobuf definition
    if hasattr(graph, 'op_names_statistic'):
        for node_name in graph.nodes:
            node = Node(graph, node_name)
            node_pb = node.soft_get('pb', None)
            if node_pb is not None:
                if hasattr(node_pb, 'op'):
                    graph.op_names_statistic[node_pb.op] += 1

    # Create a library with auxiliary functions used in TensorFlow 2 operations
    if hasattr(pb, 'library') and hasattr(pb.library, 'function'):
        graph.graph['library'] = {}
        for library_function in pb.library.function:
            function_name = library_function.signature.name
            graph.graph['library'][function_name] = {}
            graph.graph['library'][function_name]['input_arg'] = library_function.signature.input_arg
            graph.graph['library'][function_name]['output_arg'] = library_function.signature.output_arg
            graph.graph['library'][function_name]['node_def'] = library_function.node_def
            graph.graph['library'][function_name]['ret'] = library_function.ret
    # Preserve the initial order of nodes in the GraphDef. It is used to specify the order in
    # which merged nodes are added to the generated sub-graph GraphDef for the TensorFlow offload feature.
    graph.graph['initial_nodes_order'] = [node.name for node in pb.node]

    # Remove data dependency edges. This is needed for the TF offload case
    for _, attrs in list(graph.nodes(data=True)):
        pb = attrs['pb']
        if '_class' in pb.attr:
            index = 0
            while index < len(pb.attr['_class'].list.s):
                if re.match('^loc:@.*', pb.attr['_class'].list.s[index].decode('utf-8')):
                    del pb.attr['_class'].list.s[index]
                else:
                    index = index + 1
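Below is a minimal usage sketch for this loader, not taken from the source: it assumes the Model Optimizer Graph class (import path guessed in the comment) and that op_names_statistic is a collections.Counter; the model file name is hypothetical.

import collections
import tensorflow.compat.v1 as tf_v1
# from openvino.tools.mo.graph.graph import Graph  # assumed location of the Graph class

graph_def = tf_v1.GraphDef()
with open('frozen_model.pb', 'rb') as f:            # hypothetical frozen model file
    graph_def.ParseFromString(f.read())             # standard protobuf deserialization

graph = Graph()
graph.op_names_statistic = collections.Counter()    # enables the per-op statistics branch above
protobuf2nx(graph, graph_def)

print(graph.graph['initial_nodes_order'][:5])       # first node names in the original GraphDef order
print(graph.op_names_statistic.most_common(5))      # most frequent op types in the model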
Example 2
def protobuf2nx(graph, pb: tf_v1.GraphDef):
    fill_graph_with_nodes(graph, pb.node, get_id=lambda pb: pb.name, get_attrs=protobuf_attrs)
    # Preserve the initial order of nodes in the GraphDef. It is used to specify the order in
    # which merged nodes are added to the generated sub-graph GraphDef for the TensorFlow offload feature.
    graph.graph['initial_nodes_order'] = [node.name for node in pb.node]

    # Remove data dependency edges. This is needed for the TF offload case
    for _, attrs in list(graph.nodes(data=True)):
        pb = attrs['pb']
        if '_class' in pb.attr:
            index = 0
            while index < len(pb.attr['_class'].list.s):
                if re.match('^loc:@.*', pb.attr['_class'].list.s[index].decode('utf-8')):
                    del pb.attr['_class'].list.s[index]
                else:
                    index = index + 1
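To make the colocation-stripping loop above easier to follow, here is a standalone sketch that builds a NodeDef whose '_class' attribute carries 'loc:@...' entries (the format TensorFlow uses to record colocation constraints) and applies the same filter; the node name and attribute values are made up for illustration.

import re
import tensorflow.compat.v1 as tf_v1

node = tf_v1.NodeDef(name='bias_add', op='BiasAdd')                  # hypothetical node
node.attr['_class'].list.s.extend([b'loc:@conv1/weights', b'other_entry'])

index = 0
while index < len(node.attr['_class'].list.s):
    if re.match('^loc:@.*', node.attr['_class'].list.s[index].decode('utf-8')):
        del node.attr['_class'].list.s[index]                        # drop the colocation constraint
    else:
        index = index + 1

print(list(node.attr['_class'].list.s))                              # [b'other_entry']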
Example 3
def add_initializers_and_inputs_to_graph(graph: Graph, graph_pb,
                                         data_nodes_map: dict):
    """
    The function adds the nodes specified in the 'initializer' attribute of the pb, as well as the input nodes.
    :param graph: the Graph to add nodes to
    :param graph_pb: the graph protobuf message
    :param data_nodes_map: the dictionary with mapping of tensor names to node id and port
    :return: the list of Parameter nodes
    """
    initializers = Graph()
    fill_graph_with_nodes(initializers,
                          graph_pb.initializer,
                          get_id=lambda pb: pb.name,
                          get_attrs=protobuf_attrs)

    parameters = []
    # first go through all inputs and separate constants from placeholders
    for inp in graph_pb.input:
        name = str(inp.name)
        if graph.has_node(name):
            raise Error(
                'Name {} of input node already exists, input names are duplicated.',
                name)
        elif initializers.has_node(name):
            graph.add_node(name,
                           kind='op',
                           op='Const',
                           pb=inp,
                           pb_init=initializers.node[name]['pb'])
        else:
            graph.add_node(name, kind='op', op='Parameter', pb=inp)
            parameters.append(Node(graph, name))

        assert name not in data_nodes_map, 'Inconsistency between data_nodes_map and graph.nodes'
        data_nodes_map[name] = (name, 0)

    # go over all initializers and make sure that all of them are added to the graph
    for initializer in initializers.nodes():
        initializer_id = initializer
        if not graph.has_node(initializer_id):
            graph.add_node(initializer_id,
                           kind='op',
                           op='Const',
                           pb=initializers.node[initializer]['pb'],
                           pb_init=initializers.node[initializer]['pb'])
            data_nodes_map[initializer] = (initializer_id, 0)
    return parameters
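A minimal usage sketch for this helper, assuming the onnx package for loading the model and the same Graph class as in the other examples (the import path and the model file name are assumptions).

import onnx
# from openvino.tools.mo.graph.graph import Graph  # assumed location of the Graph class

model = onnx.load('model.onnx')                    # hypothetical model file
graph = Graph()
data_nodes_map = {}
parameters = add_initializers_and_inputs_to_graph(graph, model.graph, data_nodes_map)

print(len(parameters), 'Parameter nodes created')
print(list(data_nodes_map.items())[:3])            # tensor name -> (node id, output port)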
Example 4
def protobuf2nx(graph, pb):
    '''Convert a proto message with an ONNX model to an equivalent NX representation.
    All nodes and edges are restored here because the ONNX model has an op/data representation,
    which means that nodes are connected via tensor names. Tensor names are defined
    on demand in nodes, so the code here is similar to the Caffe loader.'''
    # graph = fill_graph_with_nodes(graph, pb.graph.node, get_id=node_id, get_attrs=protobuf_attrs)
    # convert initializers to a NX graph for easier control of model consistency and to use it as a dictionary later
    initializers = Graph()
    fill_graph_with_nodes(initializers, pb.graph.initializer, get_id=lambda pb: pb.name, get_attrs=protobuf_attrs)

    # maps a tensor name to the node that produced it and that node's output port: str -> (node_id, node_port)
    data_nodes_map = {}

    # first go through all inputs and separate constants from placeholders
    for inp in pb.graph.input:
        name = str(inp.name)
        if graph.has_node(name):
            raise Error('Name {} of input node already exists, input names are duplicated.', name)
        elif initializers.has_node(name):
            # this is a constant
            graph.add_node(name, kind='op', op='Const', pb=inp, pb_init=initializers.node[name]['pb'])
        else:
            # this is a placeholder
            graph.add_node(name, kind='op', op='Parameter', pb=inp)
        # add to a tensors map
        assert name not in data_nodes_map, 'Inconsistency between data_nodes_map and graph.nodes'
        data_nodes_map[name] = (name, 0)
    output_ids = []
    for outp in pb.graph.output:
        name = str(outp.name)
        if graph.has_node(name):
            log.error('Name {} of output node already exists in graph. Ignoring this output. If the output is required,'
                      ' please rename it.'.format(name), extra={'is_warning': True})
            continue
        else:
            # add fake node on output
            graph.add_node(name, kind='op', op='FakeOutput', pb=outp)
            output_ids.append(name)

    # go over all initializers and make sure that all of them are added to the graph
    for initializer in initializers.nodes():
        initializer_id = 'onnx_initializer_node_' + initializer
        if not graph.has_node(initializer_id):
            graph.add_node(initializer_id, kind='op', op='Const', pb=initializers.node[initializer]['pb'],
                           pb_init=initializers.node[initializer]['pb'])
            data_nodes_map[initializer] = (initializer_id, 0)

    # Go through all nodes in the original model order (because data nodes are defined on-the-fly and order is
    # important)
    for node in pb.graph.node:
        # create an NX node
        id = graph.unique_id(node_id(node))
        graph.add_node(id, pb=node, kind='op')

        # add incoming edges based on data_nodes_map
        for dst_port, inp in enumerate(node.input):
            # should add edge inp --> id
            if inp not in data_nodes_map:
                if inp == '':
                    # input is omitted; most likely it corresponds to an optional input for an operator
                    continue
                else:
                    raise Error(
                        'Reference to {} is not satisfied. A node refers to a non-existing data tensor. The ONNX model '
                        'is not consistent. Protobuf fragment: {}', inp, node)
            src_id, src_port = data_nodes_map[inp]

            assert (graph.has_node(src_id))
            edge_attrs = {
                'out': src_port,
                'in': dst_port,
                'name': inp,
                'fw_tensor_debug_info': [(inp, inp)],
                'in_attrs': ['in', 'name'],
                'out_attrs': ['out', 'name'],
                'data_attrs': ['fw_tensor_debug_info']
            }
            graph.add_edge(src_id, id, **edge_attrs)

        # add outgoing edges to data_nodes_map
        for src_port, out in enumerate(node.output):
            if out in output_ids:
                edge_attrs = {
                    'out': src_port,
                    'in': 0,
                    'name': out,
                    'fw_tensor_debug_info': [(out, out)],
                    'in_attrs': ['in', 'name'],
                    'out_attrs': ['out', 'name'],
                    'data_attrs': ['fw_tensor_debug_info']
                }
                graph.add_edge(id, out, **edge_attrs)
            if out in data_nodes_map:
                log.debug("Detected reuse of blob {}.".format(out))
            data_nodes_map[out] = (id, src_port)
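And a similar sketch for the ONNX variant of protobuf2nx. Note that it expects the whole ModelProto (it reads pb.graph.* internally); the Graph import path and the assumption that Graph behaves like a networkx MultiDiGraph are guesses based on how it is used above.

import onnx
# from openvino.tools.mo.graph.graph import Graph  # assumed location of the Graph class

onnx_model = onnx.load('model.onnx')               # hypothetical model file
graph = Graph()
protobuf2nx(graph, onnx_model)

# Each edge carries the attributes wired up from data_nodes_map above
for src, dst, attrs in list(graph.edges(data=True))[:3]:
    print(src, '->', dst, 'tensor:', attrs.get('name'), 'ports:', attrs.get('out'), attrs.get('in'))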