def test_nested_irs():
    """Nesting graph contexts from two different IRs must raise a RuntimeError."""
    graph_a = pir.Ir().main_graph()
    graph_b = pir.Ir().main_graph()
    # Graphs that belong to different IRs can't be nested in one context stack.
    with pytest.raises(RuntimeError):
        with graph_a:
            with graph_b:
                pass
def test_tensor_id_conflict_between_Ir():
    """Tensors with the same name in different IRs are still distinct objects."""
    first_ir = pir.Ir()
    with first_ir.main_graph():
        t1 = pir.variable(1, dtype=pir.float32, name="tensor")
    second_ir = pir.Ir()
    with second_ir.main_graph():
        t2 = pir.variable(1, dtype=pir.float32, name="tensor")
    # Both hash and equality must distinguish the two tensors.
    assert 2 == len({t1, t2})  # test __hash__
    assert t1 != t2  # test __eq__
def test_repeat_simple_addition(self, repeat_count: int):
    """Test that a simple x = x + 1 repeated `repeat_count` times will
    produce x = `repeat_count`.

    Args:
        repeat_count (int): Number of times to repeat.
    """
    ir = pir.Ir()
    main = ir.main_graph()
    with main:
        # Start counting from zero; each loop iteration adds one.
        start = pir.constant(0, pir.dtypes.int32)
        add_one = AddOne()
        add_one_graph = ir.create_graph(add_one, start)
        y = ops.repeat(add_one_graph,
                       repeat_count,
                       start,
                       subgraph_in_to_parent_in={})
        d2h = pir.d2h_stream(y.shape, pir.dtypes.int32, name="y_stream")
        ops.host_store(d2h, y)
    result = run_ir(ir, 1, d2h.tensor_id(), {})
    assert result == repeat_count
def test_repeat_error(self, repeat_count: int):
    """Test an error is thrown with incorrect repeat_count.

    Args:
        repeat_count (int): Number of times to repeat.
    """
    ir = pir.Ir()
    main = ir.main_graph()
    with main:
        h2d = pir.h2d_stream((2, 16), pir.dtypes.float32)
        x = ops.host_load(h2d, "x")
        W = pir.variable(np.random.normal(0, 0.1, (16, 16)), name="W")
        b = pir.variable(np.zeros(16), name="b")
        linear = Linear()
        linear_graph = ir.create_graph(linear, x, out_features=16)
        # An invalid trip count must be rejected before any ops are built.
        with pytest.raises(ValueError) as e_info:
            ops.repeat(linear_graph,
                       repeat_count,
                       x,
                       subgraph_in_to_parent_in={
                           linear.W: W,
                           linear.b: b
                       })
        assert e_info.value.args[0].startswith(
            "Repeat trip count for repeat of")
def test_hook():
    """Op-created hooks fire for ops on their graph only, and can be removed."""
    ir = pir.Ir()
    g = ir.main_graph()

    was_called = False

    def on_op_created(_):
        nonlocal was_called
        was_called = True

    handle = g.register_op_created_hook(on_op_created)

    # Creating an op on `g` must trigger the hook.
    with g:
        x = pir.variable(1)
        x = x + 1
    assert was_called

    was_called = False
    # Creating this graph will create an AddOp on the new graph.
    # Ensure this does not trigger the hook registered on `g`.
    ir.create_graph(lambda y: y + 1, x)
    assert not was_called

    # After removal the hook must no longer fire, even on `g`.
    g.remove_op_created_hook(handle)
    with g:
        x = x + 1
    assert not was_called
def test_get_ir(self):
    """A tensor created inside a graph reports that graph's owning IR."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable(1)
        assert tensor.ir() == ir
def test_axes_not_squeezable(self):
    """Squeezing an axis whose extent is not 1 must raise a ValueError."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable(np.ones((1, 2, 3)))
        # Axis 1 has extent 2, so it cannot be squeezed away.
        with pytest.raises(ValueError):
            ops.squeeze(tensor, axes=[1])
def test_invalid_int(self):
    """Splitting 5 rows into 2 equal parts is impossible and must raise."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]])
        # 5 is not divisible by 2, so an integer split count is invalid.
        with pytest.raises(ValueError):
            ops.split(tensor, 2)
def test_fn(self):
    """softmax adds exactly one op and one output tensor to the graph."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((1, 2, 3)))
        ops.softmax(inp, axis=1)
    # Input variable + softmax output.
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("Softmax", _ir.op.SoftmaxOp, graph)
def test_invalid_list(self):
    """Split sizes that do not sum to the axis extent must raise a popart error."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]])
        # 1 + 2 != 5, so this split list is invalid.
        with pytest.raises(popart_exception):
            ops.split(tensor, [1, 2])
def test_flatten(self):
    """flatten collapses all dims into one and is implemented as a Reshape."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((1, 2, 3)))
        flat = ops.flatten(inp)
    assert flat.shape == (6, )
    assert contains_op_of_type("Reshape", _ir.op.ReshapeOp, graph)
def test_dunder_scalar_and_slice(self):
    """Mixed integer/slice indexing on a tensor matches numpy semantics."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable(data)
        sliced = tensor[0, 3:0:-1, 2]
    host_result = run_ir(ir, sliced)
    expected = data[0, 3:0:-1, 2]
    assert_array_equal(host_result, expected)
def test_error_lengths(self, inplace):
    """start/stop/axis lists of mismatched lengths must raise a ValueError."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = pir.variable(data)
        # Exercise both the inplace and outplace variants of slice.
        slice_fn = ops.slice_ if inplace else ops.slice
        with pytest.raises(ValueError):
            slice_fn(tensor, start=[2], stop=[3, 4], axis=[2, 1])
def test_fn(self):
    """gelu adds exactly one op and one output tensor to the graph."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((1, 2, 3)))
        ops.gelu(inp)
    # Input variable + gelu output.
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("Gelu", _ir.op.GeluOp, graph)
def test_negative(self):
    """A -1 in the reshape target is inferred from the remaining dims."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((1, 2, 3)))
        out = inp.reshape((-1, 1))
    # -1 resolves to 6 (= 1 * 2 * 3).
    assert out.shape == (6, 1)
    assert contains_op_of_type("Reshape", _ir.op.ReshapeOp, graph)
def test_construction1(self, t_class):
    """Test construction of tensors that hold 0-d data at graph creation."""
    ir = pir.Ir()
    with ir.main_graph():
        tensor = ctor_map[t_class](1.0)
        # A Python float defaults to float32 with scalar (0-d) shape.
        assert tensor.dtype == pir.float32
        assert tensor.shape == ()
        assert tensor.nelms == 1
def test_cmp(self):
    """Distinct tensors compare unequal, hash differently, and have a repr."""
    ir = pir.Ir()
    with ir.main_graph():
        x = pir.variable(1)
        y = pir.variable(1)
        assert x != y  # test __eq__
        assert len({x, y}) == 2  # test __hash__
        str(x)  # test __repr__
def test_cache_env():
    """The POPART_CACHE_DIR env var enables engine caching at the given path.

    The variable is set before constructing the Ir and removed afterwards.
    """
    os.environ['POPART_CACHE_DIR'] = 'PATH_TO_CACHE'
    try:
        ir = pir.Ir()
        opts = ir._pb_ir.getSessionOptions()
        assert opts.enableEngineCaching
        assert opts.cachePath == 'PATH_TO_CACHE'
    finally:
        # Restore the environment even if an assertion fails, so the
        # variable does not leak into subsequent tests in this session.
        del os.environ['POPART_CACHE_DIR']
def test_fn(self):
    """logical_not on a bool input adds a single Not op and output tensor."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(True, pir.bool)
        ops.logical_not(inp)
    # Input variable + Not output; exactly one variable in the graph.
    assert len(graph.get_tensors()) == 2
    assert len(graph.get_variables()) == 1
    assert contains_op_of_type("Not", _ir.op.NotOp, graph)
def test_get_main_graph():
    """pir.gmg() returns the main graph regardless of the current context."""
    ir = pir.Ir()
    main = ir.main_graph()
    with main:
        subgraph = ir.create_empty_graph('sg')
        # Main graph does not necessarily need to be top of context.
        with subgraph:
            assert pir.gmg() == main
def test_squeeze_specified_negative(self):
    """Negative axes in squeeze are resolved relative to the tensor rank."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((4, 1, 3, 1, 4, 5, 1)))
        # -4 resolves to axis 3; only axes 1 and 3 are removed.
        out = ops.squeeze(inp, axes=[-4, 1])
    assert out.shape == (4, 3, 4, 5, 1)
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("Reshape", _ir.op.ReshapeOp, graph)
def test_tensor_id_conflict():
    """Colliding tensor names within one graph are uniquified."""
    ir = pir.Ir()
    with ir.main_graph():
        ids = [
            pir.variable(1, name="tensor").id,
            pir.variable(1, name="tensor").id,
            pir.constant(1, name="tensor").id,
        ]
    # The first tensor keeps the requested name...
    assert ids[0] == "tensor"
    # ...and every later clash is assigned a distinct id.
    assert len(ids) == len(set(ids))
def test_constructor():
    """Test that the `Ir` constructor sets the internal object state
    correctly.
    """
    ir = pir.Ir()
    # A fresh low-level IR contains exactly one graph: the main graph.
    assert len(ir._pb_ir.getAllGraphs()) == 1
    assert isinstance(ir.main_graph(), pir.Graph)
def test_fn(self):
    """random_normal consumes a seed variable and adds one output tensor."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        seed = pir.variable(np.array([32, 32]), dtype=dtypes.uint32)
        ops.random_normal(seed, (2, 2))
    # Seed variable + random output; one variable total.
    assert len(graph.get_tensors()) == 2
    assert len(graph.get_variables()) == 1
    assert contains_op_of_type("RandomNormal", _ir.op.RandomNormalOp, graph)
def test_dunder(self):
    """Unary minus on a tensor lowers to a Negate op."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(1)
        -inp
    # Input variable + negated output; one variable total.
    assert len(graph.get_tensors()) == 2
    assert len(graph.get_variables()) == 1
    assert contains_op_of_type("Neg", _ir.op.NegateOp, graph)
def test_fn_with_no_producer(self):
    """ipu_copy works on a tensor with no producer op (a variable)."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(1)
        ops.ipu_copy(inp, 1, 0)
    # Variable + copied tensor; one variable total.
    assert len(graph.get_variables()) == 1
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("IpuCopy", _ir.op.IpuCopyOp, graph)
def test_tensor_method(self):
    """Tensor.reshape produces the requested shape via a Reshape op."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((1, 2, 3)))
        out = inp.reshape((2, 3, 1))
    assert out.shape == (2, 3, 1)
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("Reshape", _ir.op.ReshapeOp, graph)
def test_squeeze_all(self):
    """squeeze with no axes removes every size-1 dimension."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(np.ones((4, 1, 3, 1)))
        out = ops.squeeze(inp)
    # Both size-1 axes are dropped; squeeze lowers to a Reshape.
    assert out.shape == (4, 3)
    assert len(graph.get_tensors()) == 2
    assert contains_op_of_type("Reshape", _ir.op.ReshapeOp, graph)
def test_fn(self):
    """increment_mod adds a single IncrementMod op and output tensor."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(1)
        ops.increment_mod(inp, 1, 3)
    # Input variable + incremented output; one variable total.
    assert len(graph.get_tensors()) == 2
    assert len(graph.get_variables()) == 1
    assert contains_op_of_type("IncrementMod", _ir.op.IncrementModOp, graph)
def test_needs_casting(self):
    """logical_not on an int input inserts a cast, giving three tensors."""
    ir = pir.Ir()
    graph = ir.main_graph()
    with graph:
        inp = pir.variable(1, pir.int32)
        ops.logical_not(inp)
    # Variable + cast-to-bool intermediate + Not output = 3 tensors.
    assert len(graph.get_tensors()) == 3
    assert len(graph.get_variables()) == 1
    assert contains_op_of_type("Not", _ir.op.NotOp, graph)