def make_graph():
    graph, _ = toposort_multi_tier_output_graph()
    graph.outputs.pop()
    # Deep copy should work with empty tensors
    graph.nodes[0].inputs.append(Variable.empty())
    graph.nodes[0].outputs.append(Variable.empty())
    return graph
def check_tensor(name: str):
    if name not in tensor_map:
        if name:
            G_LOGGER.debug(
                "Tensor: {:} was not generated during shape inference, or shape inference was not run on this model. Creating a new Tensor.".format(name)
            )
            tensor_map[name] = Variable(name)
        else:
            # Empty tensors are not tracked by the graph, as these represent optional inputs/outputs that have been omitted.
            G_LOGGER.verbose("Generating empty tensor")
            return Variable.empty()
    return tensor_map[name]
def lstm_model():
    path = os.path.join(TEST_ROOT, "models", "lstm.onnx")
    model = onnx.load(path)
    onnx_graph = model.graph

    def load_initializer(index: int) -> np.ndarray:
        return onnx.numpy_helper.to_array(onnx_graph.initializer[index])

    # Optional inputs are represented by empty tensors
    X = Variable(name="X", dtype=np.float32, shape=(4, 3, 6))
    W = Constant(name="W", values=load_initializer(0))
    R = Constant(name="R", values=load_initializer(1))
    B = Constant(name="B", values=load_initializer(2))
    initial_c = Constant(name="initial_c", values=load_initializer(3))
    Y = Variable(name="Y", dtype=np.float32, shape=(4, 1, 3, 5))
    Y_h = Variable(name="Y_h", dtype=np.float32, shape=(1, 3, 5))
    Y_c = Variable(name="Y_c", dtype=np.float32, shape=(1, 3, 5))

    attrs = OrderedDict()
    attrs["direction"] = "forward"
    attrs["hidden_size"] = 5

    node = Node(
        op="LSTM",
        attrs=attrs,
        inputs=[X, W, R, B, Variable.empty(), Variable.empty(), initial_c],
        outputs=[Y, Y_h, Y_c],
    )

    # Initializers will not be included in the graph inputs.
    return Model(
        path,
        inputs=[X],
        outputs=[Y, Y_h, Y_c],
        nodes=[node],
        opset=OnnxImporter.get_opset(model),
    )
def get_tensor(name: str, check_outer_graph=True):
    # Prioritize the subgraph even if check_outer_graph is set
    if name in subgraph_tensor_map:
        return subgraph_tensor_map[name]

    if check_outer_graph and name in tensor_map:
        return tensor_map[name]

    if not name:
        # Empty tensors are not tracked by the graph, as these represent optional inputs/outputs that have been omitted.
        G_LOGGER.verbose("Generating empty tensor")
        return Variable.empty()

    G_LOGGER.verbose(
        "Tensor: {:} was not generated during shape inference, or shape inference was not run on this model. Creating a new Tensor.".format(name)
    )
    subgraph_tensor_map[name] = Variable(name)
    return subgraph_tensor_map[name]
def get_tensor(name):
    # An empty name denotes an omitted optional input/output, which is not tracked by the graph.
    if not name:
        return Variable.empty()
    return local_tensor_copies[name]
def get_tensor(name):
    # An empty name denotes an omitted optional input/output, which is not tracked by the graph.
    if not name:
        return Variable.empty()
    return tensor_map[name]
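# A minimal usage sketch (not part of the sources above; assumes the onnx-graphsurgeon
# package, from which Variable, Constant, and Node are imported, and illustrative tensor
# names): omitted optional ONNX inputs are represented by empty tensors so that the
# positional input slots of a node stay aligned.
import numpy as np
from onnx_graphsurgeon import Constant, Node, Variable

x = Variable(name="x", dtype=np.float32, shape=(1, 3))
y = Variable(name="y", dtype=np.float32, shape=(1, 3))
clip_max = Constant(name="clip_max", values=np.array(6.0, dtype=np.float32))

# The optional "min" input of Clip (opset >= 11) is omitted here; Variable.empty() holds its slot.
node = Node(op="Clip", inputs=[x, Variable.empty(), clip_max], outputs=[y])
assert node.inputs[1].is_empty()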