def __init__(self, op: str, name: str = None, attrs: Dict[str, object] = None, inputs: List["Tensor"] = None, outputs: List["Tensor"] = None):
    """
    A node represents an operation in a graph, and consumes zero or more Tensors,
    and produces zero or more Tensors.

    Args:
        op (str): The operation this node performs.
        name (str): The name of this node.
        attrs (Dict[str, object]): A dictionary that maps attribute names to their values.
        inputs (List[Tensor]): A list of zero or more input Tensors.
        outputs (List[Tensor]): A list of zero or more output Tensors.
    """
    self.op = op
    self.name = misc.default_value(name, "")
    self.attrs = misc.default_value(attrs, OrderedDict())

    # Inputs/outputs are synchronized with the corresponding tensors: appending a
    # tensor to this node's inputs also records this node in that tensor's
    # outputs, and vice versa (see misc.SynchronizedList).
    initial_inputs = misc.default_value(inputs, [])
    initial_outputs = misc.default_value(outputs, [])
    self.inputs = misc.SynchronizedList(self, field_name="outputs", initial=initial_inputs)
    self.outputs = misc.SynchronizedList(self, field_name="inputs", initial=initial_outputs)
def copy(self, tensor_map: "OrderedDict[str, Tensor]" = None):
    """
    Copy the graph.

    This makes copies of all nodes and tensors in the graph, but will not do a deep-copy of
    weights or attributes (with the exception of ``Graph`` attributes, which will be copied
    using their ``copy`` method).

    Args:
        tensor_map (OrderedDict[str, Tensor]):
                A mapping of tensor names to tensors from the outer graph.
                This should be ``None`` if this is the outer-most graph.

    Returns:
        Graph: A copy of the graph.
    """
    outer_tensors = copy.copy(misc.default_value(tensor_map, {}))

    # Build the copied-tensor lookup table in increasing order of precedence:
    #   1. every tensor reachable from this graph (for a subgraph copied by itself,
    #      tensors() also yields tensors produced by outer graphs),
    #   2. copies the outer graph has already made,
    #   3. locally-produced tensors, which always win.
    tensor_copies = {name: tensor.copy() for name, tensor in self.tensors().items()}
    tensor_copies.update(outer_tensors)
    tensor_copies.update({name: tensor.copy() for name, tensor in self._local_tensors().items()})

    def lookup(name):
        # An empty name denotes an omitted optional tensor.
        if not name:
            return Variable.empty()
        return tensor_copies[name]

    # Copy each node, rewiring its inputs/outputs to the copied tensors.
    copied_nodes = [
        node.copy(
            inputs=[lookup(inp.name) for inp in node.inputs],
            outputs=[lookup(out.name) for out in node.outputs],
            tensor_map=tensor_copies,
        )
        for node in self.nodes
    ]

    return Graph(
        nodes=copied_nodes,
        inputs=[lookup(inp.name) for inp in self.inputs],
        outputs=[lookup(out.name) for out in self.outputs],
        name=copy.copy(self.name),
        doc_string=copy.copy(self.doc_string),
        opset=copy.copy(self.opset),
        import_domains=self.import_domains,
    )
def __init__(self, nodes: Sequence[Node] = None, inputs: Sequence[Tensor] = None, outputs: Sequence[Tensor] = None, name=None, doc_string=None, opset=None, import_domains=None):
    """
    Args:
        nodes (Sequence[Node]): A list of the nodes in this graph.
        inputs (Sequence[Tensor]): A list of graph input Tensors.
        outputs (Sequence[Tensor]): A list of graph output Tensors.
        name (str): The name of the graph. Defaults to "onnx_graphsurgeon_graph".
        doc_string (str): A doc_string for the graph. Defaults to "".
        opset (int): The ONNX opset version to use. Defaults to ``Graph.DEFAULT_OPSET``.
        import_domains: The operator-set domains imported by this graph, if any.
    """
    self.nodes = misc.default_value(nodes, [])
    self.inputs = list(misc.default_value(inputs, []))
    self.outputs = list(misc.default_value(outputs, []))

    self.name = misc.default_value(name, "onnx_graphsurgeon_graph")
    # Mirror the graph name into __name__ so the instance presents a
    # function-like identity.
    self.__name__ = self.name

    self.doc_string = misc.default_value(doc_string, "")
    self.opset = misc.default_value(opset, Graph.DEFAULT_OPSET)
    self.import_domains = misc.default_value(import_domains, None)

    # Printing graphs can be very expensive, so pass a lambda that the logger
    # only evaluates when ultra-verbose logging is actually enabled.
    G_LOGGER.ultra_verbose(lambda: "Created Graph: {:}".format(self))

    # Running counter used by the layer() helper to generate unique names.
    self.name_idx = 0
def __init__(self, name: str, dtype: np.dtype = None, shape: Sequence[Union[int, str]] = None):
    """
    Represents a Tensor whose value is not known until inference-time.

    Args:
        name (str): The name of the tensor.
        dtype (numpy.dtype): The data type of the tensor.
        shape (Sequence[Union[int, str]]): The shape of the tensor. This may contain strings
                if the model uses dimension parameters.
    """
    self.name = name
    # Producer/consumer links are kept synchronized: adding a node to this
    # tensor's inputs also registers this tensor in that node's outputs, and
    # vice versa (see misc.SynchronizedList).
    self.inputs = misc.SynchronizedList(self, field_name="outputs", initial=[])
    self.outputs = misc.SynchronizedList(self, field_name="inputs", initial=[])
    self.dtype = dtype
    self.shape = misc.default_value(shape, None)
def copy(self, tensor_map: "OrderedDict[str, Tensor]" = None):
    """
    Copy the graph.

    This makes copies of all nodes and tensors in the graph, but will not do a deep-copy of
    weights or attributes (with the exception of ``Graph`` attributes, which will be copied
    using their ``copy`` method).

    Args:
        tensor_map (OrderedDict[str, Tensor]):
                A mapping of tensor names to tensors from the outer graph.
                This should be ``None`` if this is the outer-most graph.

    Returns:
        Graph: A copy of the graph.
    """
    # First, reconstruct each tensor in the graph, but with no inputs or outputs.
    tensor_map = copy.copy(misc.default_value(tensor_map, {}))

    local_tensors = self.tensors()
    local_tensor_copies = {
        name: tensor.copy()
        for name, tensor in local_tensors.items()
    }
    # Copies already made by the outer graph take precedence over ours.
    # NOTE(review): this also lets outer copies shadow tensors produced in this
    # graph; confirm whether locally-produced tensors should win instead.
    local_tensor_copies.update(tensor_map)

    def get_tensor(name):
        # An empty tensor name denotes an omitted optional input/output.
        if not name:
            return Variable.empty()
        return local_tensor_copies[name]

    # Next, copy nodes, and update inputs/outputs to refer to the copied tensors.
    new_nodes = []
    for node in self.nodes:
        new_node = node.copy(
            inputs=[get_tensor(inp.name) for inp in node.inputs],
            outputs=[get_tensor(out.name) for out in node.outputs],
            tensor_map=local_tensor_copies)
        new_nodes.append(new_node)

    new_graph_inputs = [get_tensor(inp.name) for inp in self.inputs]
    new_graph_outputs = [get_tensor(out.name) for out in self.outputs]
    return Graph(nodes=new_nodes,
                 inputs=new_graph_inputs,
                 outputs=new_graph_outputs,
                 name=copy.copy(self.name),
                 doc_string=copy.copy(self.doc_string),
                 opset=copy.copy(self.opset),
                 # BUG FIX: previously the copy dropped import_domains entirely,
                 # so copied graphs lost their imported operator-set domains.
                 import_domains=self.import_domains)
def import_graph(onnx_graph: onnx.GraphProto,
                 tensor_map: "OrderedDict[str, Tensor]" = None,
                 opset=None,
                 import_domains: onnx.OperatorSetIdProto = None) -> Graph:
    """
    Imports a Graph from an ONNX Graph.

    Args:
        onnx_graph (onnx.GraphProto): The ONNX graph to import.

        tensor_map (OrderedDict[str, Tensor]): A mapping of tensor names to Tensors. This is generally only useful for subgraph import.
        opset (int): The ONNX opset to use for this graph.
        import_domains (onnx.OperatorSetIdProto): The operator-set domains imported by the graph, if any.

    Returns:
        Graph: The imported graph.
    """
    tensor_map = copy.copy(misc.default_value(tensor_map, OrderedDict()))  # Outer graph tensors, read-only
    subgraph_tensor_map = OrderedDict()  # Tensors in this subgraph

    # Retrieves a Tensor from subgraph_tensor_map or the outer graph (tensor_map) if present, otherwise imports the tensor
    # If overwrite=True, this function will overwrite previously imported tensors
    # if the new tensor has more information available.
    def get_tensor(onnx_tensor: Union[onnx.ValueInfoProto, onnx.TensorProto],
                   overwrite=False,
                   check_outer_graph=True) -> Tensor:
        # Prioritize the subgraph even if check_outer_graph is set
        if onnx_tensor.name in subgraph_tensor_map:
            if overwrite:
                tensor = OnnxImporter.import_tensor(onnx_tensor)
                if isinstance(subgraph_tensor_map[onnx_tensor.name], Variable):
                    # Only fill in metadata that is still missing; never clobber a
                    # dtype/shape that was already imported for this tensor.
                    subgraph_tensor_map[
                        onnx_tensor.name].dtype = subgraph_tensor_map[
                            onnx_tensor.name].dtype or tensor.dtype
                    subgraph_tensor_map[
                        onnx_tensor.name].shape = subgraph_tensor_map[
                            onnx_tensor.name].shape or tensor.shape
            return subgraph_tensor_map[onnx_tensor.name]

        if check_outer_graph and onnx_tensor.name in tensor_map:
            return tensor_map[onnx_tensor.name]

        subgraph_tensor_map[onnx_tensor.name] = OnnxImporter.import_tensor(
            onnx_tensor)
        return subgraph_tensor_map[onnx_tensor.name]

    # Import initializers contents into Constants.
    G_LOGGER.verbose("Importing initializers")
    for initializer in onnx_graph.initializer:
        get_tensor(initializer)

    # Import all tensors whose shapes are known. Tensors may be repeated, and some of these
    # duplicates may not include shape/dtype information, so overwrite is set to True
    # so that we can capture all the information available about the tensor
    G_LOGGER.verbose("Importing tensors with known shapes")
    for tensor in onnx_graph.value_info:
        get_tensor(tensor, overwrite=True)

    # Import graph inputs and outputs. Initializers are not considered to be inputs.
    # Graph inputs and outputs can never come from the outer graph!
    initializer_names = set(
        [tensor.name for tensor in onnx_graph.initializer])

    G_LOGGER.verbose("Importing graph inputs")
    graph_inputs = []  # List[Tensor]
    for inp in onnx_graph.input:
        if inp.name not in initializer_names:
            tensor = get_tensor(inp, check_outer_graph=False)
            graph_inputs.append(tensor)

    G_LOGGER.verbose("Importing graph outputs")
    graph_outputs = []  # List[Tensor]
    for out in onnx_graph.output:
        tensor = get_tensor(out, check_outer_graph=False)
        graph_outputs.append(tensor)

    G_LOGGER.verbose("Importing nodes")
    nodes = []  # List[Node]
    for onnx_node in onnx_graph.node:
        # Nodes may reference tensors from the outer graph (tensor_map) as well
        # as from this subgraph (subgraph_tensor_map).
        node = OnnxImporter.import_node(onnx_node, tensor_map,
                                        subgraph_tensor_map)
        nodes.append(node)

    return Graph(nodes=nodes,
                 inputs=graph_inputs,
                 outputs=graph_outputs,
                 name=onnx_graph.name,
                 doc_string=onnx_graph.doc_string,
                 opset=opset,
                 import_domains=import_domains)
def __init__(self, initial_tensors=None):
    """
    Args:
        initial_tensors: An optional iterable of tensors whose names should be
            recorded as already seen.
    """
    seed = misc.default_value(initial_tensors, [])
    # Names of tensors observed so far.
    self.seen_tensors = {tensor.name for tensor in seed}