def test_copy_with_subgraph_dup_const_tensors(self):
    """Copying a graph must keep inner/outer constants with duplicate names distinct."""
    # Outer and inner graphs each own a Constant named "input"; the shapes
    # differ so the two can be told apart after copying.
    outer_const = Constant("input", values=np.ones(dtype=np.float32, shape=(4, 5)))
    outer = Graph()

    inner_const = Constant("input", values=np.ones(dtype=np.float32, shape=(1, 2)))
    inner = Graph()
    inner.outputs = [inner.identity(inner_const)]

    outer.outputs = [outer.nested(outer_const, inner)]

    duplicated = outer.copy()
    # The copied subgraph must reference its own "input", not the outer one.
    assert duplicated.nodes[0].attrs["body"].nodes[0].inputs[0].shape == (1, 2)
def test_shape_gather(self, shape, indices):
    """Gather over a folded Shape is itself foldable only for ndarray indices on static shapes."""
    indices = np.array(indices)

    tensor = Variable("input", dtype=np.float32, shape=shape)
    graph = Graph(inputs=[tensor])

    shape_out = graph.shape(tensor)
    gathered = graph.gather(shape_out, indices=indices)
    graph.outputs = [
        graph.add(gathered, gathered),
        graph.gather(shape_out, indices=[0]),
        graph.gather(shape_out, indices=np.array(0)),
    ]

    graph.fold_constants()

    if shape is None:
        # Unknown input shape: nothing can fold.
        assert isinstance(graph.outputs[0], Variable)
    else:
        assert isinstance(graph.outputs[0], Constant)
        expected = np.array(shape)[indices].astype(np.int64) * 2
        assert np.all(graph.outputs[0].values == expected)
    # Gathers using a plain list or a 0-d array as indices are never folded.
    assert isinstance(graph.outputs[1], Variable)
    assert isinstance(graph.outputs[2], Variable)
def test_shape_of_variable_tensor_dynamic_shape(self):
    """Shape of a tensor with dynamic dimensions must not be folded."""
    tensor = Variable("var", dtype=np.float32, shape=("", -1, 0, 4))
    graph = Graph(inputs=[tensor])
    graph.outputs = [graph.shape(tensor)]

    graph.fold_constants().cleanup()

    # Dynamic dims prevent folding, so the Shape node survives cleanup.
    assert len(graph.nodes) == 1
    assert graph.nodes[0].op == "Shape"
    assert isinstance(graph.outputs[0], Variable)
def test_shape_of_constant_node(self):
    """Shape of a Constant node's output folds away completely."""
    graph = Graph()
    vals = np.ones((1, 3, 3), dtype=np.int64)
    graph.outputs = [graph.shape(graph.constant(values=vals))]

    graph.fold_constants().cleanup()

    assert not graph.nodes
    out = graph.outputs[0]
    assert isinstance(out, Constant)
    assert np.all(out.values == (1, 3, 3))
def test_node_used_only_in_nested_graph(self):
    """Cleanup must keep a node whose only consumer lives in a subgraph."""
    X = Variable("X", dtype=np.float32, shape=(1,))
    Y = Variable("Y", dtype=np.float32, shape=(1,))
    graph = Graph(inputs=[X, Y])

    # X_p feeds only the subgraph, never the outer graph directly.
    X_p = graph.identity(X)

    sub_in = Variable("subgraph_input", dtype=np.float32, shape=(1,))
    subgraph = Graph(inputs=[sub_in])
    subgraph.outputs = [subgraph.add(sub_in, X_p)]
    graph.outputs = [graph.nested(Y, subgraph)]

    graph.cleanup(remove_unused_graph_inputs=True)

    # The Identity must survive: the nested graph still depends on it.
    assert graph.nodes[0].op == "Identity"
    assert graph.nodes[0].inputs == [X]
def test_shape_of_variable_tensor_static_shape(self):
    """Shape of a fully static-shaped variable folds to a Constant.

    NOTE(review): the original body re-assigned ``graph.inputs = [var]``
    immediately after constructing ``Graph(inputs=[var])``; that redundant
    assignment is removed here.
    """
    var = Variable("var", dtype=np.float32, shape=(1, 3, 4))
    graph = Graph(inputs=[var])
    graph.outputs = [graph.shape(var)]

    graph.fold_constants().cleanup()

    # The Shape node folds away entirely, leaving a constant output.
    assert not graph.nodes
    assert isinstance(graph.outputs[0], Constant)
    assert np.all(graph.outputs[0].values == (1, 3, 4))
def test_shape_of_variable_tensor_static_shape_no_fold(self):
    """With fold_shapes=False, even a static Shape node must remain."""
    graph = Graph()
    tensor = Variable("var", dtype=np.float32, shape=(1, 3, 4))
    graph.inputs = [tensor]
    graph.outputs = [graph.shape(tensor)]

    graph.fold_constants(fold_shapes=False).cleanup()

    assert len(graph.nodes) == 1
    assert graph.nodes[0].op == "Shape"
    assert isinstance(graph.outputs[0], Variable)
def test_copy_with_subgraph_dup_tensors(self):
    """Copying must keep inner/outer variables with duplicate names distinct."""
    # Outer and inner graphs both declare a Variable named "input";
    # distinct shapes let us tell them apart after the copy.
    outer_in = Variable("input", dtype=np.float32, shape=(4, 5))
    outer = Graph(inputs=[outer_in])

    inner_in = Variable("input", dtype=np.float32, shape=(1, 2))
    inner = Graph(inputs=[inner_in])

    outer.outputs = [outer.nested(outer_in, inner)]

    duplicated = outer.copy()
    assert duplicated.nodes[0].attrs["body"].inputs[0].shape == (1, 2)
def test_const_node(self):
    """A Constant node folds into a Constant tensor and is then removed."""
    graph = Graph()
    vals = np.ones((1, 3, 3), dtype=np.int64)
    graph.outputs = [graph.constant(values=vals)]

    # Before folding, the node's output is still a Variable.
    assert isinstance(graph.outputs[0], Variable)

    graph.fold_constants().cleanup()

    # After folding, the output is a Constant and no nodes remain.
    assert isinstance(graph.outputs[0], Constant)
    assert np.all(graph.outputs[0].values == vals)
    assert not graph.nodes
def test_const_inp_but_non_foldable_nested_graph(self):
    """An If with constant inputs must not fold when its branches use outer-scope tensors."""
    cond = gs.Constant("cond", values=np.array(True))
    X = gs.Variable("X", dtype=np.float32, shape=(1,))
    graph = Graph(inputs=[X])

    then_branch = Graph(name="Then")
    then_branch.outputs = [then_branch.add(X, X)]

    else_branch = Graph(name="Else")
    else_branch.outputs = [else_branch.add(X, else_branch.add(X, X))]

    # Even though if_op looks foldable because it has all constant inputs,
    # it's not, since its subgraphs depend on variables in the outer scope.
    graph.outputs = [graph.if_op(cond, then_branch, else_branch)]

    # This should not raise because the `If` node should be excluded from
    # constant folding.
    graph.fold_constants(error_ok=False).cleanup()

    assert graph.nodes[0].op == "If"
    assert len(then_branch.nodes) == 1
    assert len(else_branch.nodes) == 2
def test_io_cannot_be_sync_list_on_assign(self):
    """Assigning node I/O lists to a graph must convert them to plain lists."""
    tensor_a = Variable("input0", shape=(1, 3), dtype=np.float32)
    tensor_b = Variable("input1", shape=(1, 3), dtype=np.float32)
    node = Node("Add", inputs=[tensor_a], outputs=[tensor_b])

    # A Node's I/O containers are SynchronizedLists.
    assert isinstance(node.inputs, SynchronizedList)
    assert isinstance(node.outputs, SynchronizedList)

    graph = Graph(nodes=[node], inputs=[], outputs=[])
    graph.inputs = node.inputs
    graph.outputs = node.outputs

    # But a Graph's I/O must not be, even when assigned from a node's lists.
    assert not isinstance(graph.inputs, SynchronizedList)
    assert not isinstance(graph.outputs, SynchronizedList)
def test_with_nested_graph(self):
    """Constant folding recurses into If branches, folding each independently.

    Fix: ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24; the
    builtin ``bool`` is the documented replacement and is a valid dtype.
    """
    cond = gs.Variable("cond", dtype=bool, shape=(1,))
    X = gs.Variable("X", dtype=np.float32, shape=(1,))
    Y = gs.Constant("Y", values=np.ones((1,), dtype=np.float32))

    graph = Graph(inputs=[X, cond])

    then_graph = Graph(name="Then")
    then_graph.outputs = [then_graph.add(Y, Y)]

    else_graph = Graph(name="Else")
    else_graph.outputs = [else_graph.add(X, else_graph.add(Y, Y))]

    graph.outputs = [graph.if_op(cond, then_graph, else_graph)]
    graph.fold_constants()
    graph.cleanup()

    # The Then branch is fully constant, so it folds completely.
    assert len(then_graph.nodes) == 0
    assert np.all(then_graph.outputs[0].values == (Y.values * 2))

    # The Else branch keeps one Add (it depends on X); its constant
    # sub-expression (Y + Y) folds into a Constant input of that Add.
    assert len(else_graph.nodes) == 1
    assert isinstance(else_graph.nodes[0].inputs[1], Constant)
    assert np.all(else_graph.nodes[0].inputs[1].values == (Y.values * 2))
def simple_foldable():
    # Graph:
    #   c = (a + b)
    #   output = input + c
    # Should fold to:
    #   output = input + c
    weights = np.ones(shape=(1, 3), dtype=np.float32)

    graph = Graph()
    model_input = Variable("input", shape=(1, 3), dtype=np.float32)
    c = graph.add(weights, weights, name="c")
    result = graph.add(model_input, c)

    graph.inputs = [model_input]
    graph.outputs = [result]
    yield graph
def one_hop_foldable():
    # Graph:
    #   c = (a + b)
    #   e = (c + d)
    #   output = input + e
    # Should fold to:
    #   output = input + e
    weights = np.ones(shape=(1, 3), dtype=np.float32)

    graph = Graph()
    model_input = Variable("input", shape=(1, 3), dtype=np.float32)
    c = graph.add(weights, weights, name="c")
    e = graph.add(c, weights, name="e")
    result = graph.add(model_input, e)

    graph.inputs = [model_input]
    graph.outputs = [result]
    yield graph
def test_shape_of_variable_tensor_multiple_shapes(self):
    """Multiple static Shape nodes fold away, leaving unrelated nodes intact."""
    graph = Graph()
    tensor = Variable("var", dtype=np.float32, shape=(1, 3, 4))
    scalar = Variable("var2", dtype=np.float32, shape=tuple())  # 0-D (scalar)
    graph.inputs = [tensor, scalar]
    graph.outputs = [
        graph.shape(tensor),
        graph.identity(tensor),
        graph.shape(scalar),
    ]

    graph.fold_constants().cleanup()

    # Both Shape nodes fold; only the Identity survives.
    assert len(graph.nodes) == 1
    assert graph.nodes[0].op == "Identity"
    assert isinstance(graph.outputs[0], Constant)
    assert np.all(graph.outputs[0].values == (1, 3, 4))
    assert isinstance(graph.outputs[2], Constant)
    assert np.all(graph.outputs[2].values == tuple())
def test_input_is_output(self):
    """Cleanup must preserve a graph whose inputs also appear as outputs."""
    graph = Graph()
    A = Variable("A", dtype=np.float32, shape=(1, 1))
    B = Variable("B", dtype=np.float32, shape=(1, 1))
    C = graph.add(A, B)
    graph.inputs = [A, B]
    # Outputs deliberately out of order relative to the Add node's inputs.
    graph.outputs = [C, B, A]

    graph.cleanup()

    # Everything, including I/O tensors, must remain unchanged.
    assert graph.inputs == [A, B]
    assert graph.outputs == [C, B, A]
    assert len(graph.nodes) == 1
    assert graph.nodes[0].inputs == [A, B]
    assert graph.nodes[0].outputs == [C]
def test_shape_slice_single_input(self):
    """Slice with attribute-style (single-input) parameters folds over a static shape."""
    tensor = Variable("input", dtype=np.int64, shape=(5, 6, 3, 2))
    graph = Graph(inputs=[tensor])
    graph.outputs = [graph.slice(graph.shape(tensor))]

    # Provide the slice parameters via attributes rather than extra inputs.
    slice_node = graph.outputs[0].inputs[0]
    slice_node.attrs = {"axes": [0], "starts": [1], "ends": [3], "steps": [2]}

    graph.fold_constants()

    assert isinstance(graph.outputs[0], Constant)
    assert np.all(graph.outputs[0].values == tensor.shape[1:3:2])
def test_shape_slice(self, shape, starts, ends, axes, steps, expected):
    """Slicing a folded Shape yields a Constant exactly when `expected` is truthy."""
    tensor = Variable("input", dtype=np.float32, shape=shape)
    graph = Graph(inputs=[tensor])

    sliced = graph.slice(
        graph.shape(tensor),
        np.array(starts),
        np.array(ends),
        axes=np.array(axes),
        steps=np.array(steps),
    )
    graph.outputs = [sliced]

    graph.fold_constants()

    if not expected:
        assert isinstance(graph.outputs[0], Variable)
    else:
        assert isinstance(graph.outputs[0], Constant)
        assert np.all(graph.outputs[0].values == expected)
def foldable_with_invalid_node():
    # Graph
    #   c = (a + b)
    #   e = fake(d)
    #   f = (e + c)
    #   out = inp + f
    #
    # c should be folded even though e is the output of an
    # invalid node.
    weights = np.ones(shape=(1, 3), dtype=np.float32)

    graph = Graph()
    model_input = Variable("input", shape=(1, 3), dtype=np.float32)
    c = graph.add(weights, weights, name="c")
    e = graph.fake(weights, name="e")
    f = graph.add(e, c, name="f")
    result = graph.add(model_input, f, name="output")

    graph.inputs = [model_input]
    graph.outputs = [result]
    yield graph