def test_rowwise_add(self):
    op = create_op("rowwise_add")
    inputs = {
        "X": np.random.uniform(0.1, 1, [5, 10]).astype("float32"),
        "b": np.random.uniform(0.1, 1, [10]).astype("float32")
    }
    self.check_grad(op, inputs, set(["X", "b"]), "Out")

def test_grad(self):
    op = create_op("sigmoid")
    inputs = {"X": np.random.uniform(0.1, 1, [11, 17]).astype("float32")}
    # compare GPU and CPU results for the backward op;
    # this test will be skipped if only the CPU version is compiled.
    self.compare_grad(op, inputs)
    # check gradients
    self.check_grad(op, inputs, set("X"), "Y", max_relative_error=0.007)

def test_mul(self):
    op = create_op("mul")
    inputs = {
        'X': np.random.random((32, 84)).astype("float32"),
        'Y': np.random.random((84, 100)).astype("float32")
    }
    # the mul op amplifies the relative error, so use a looser tolerance
    self.check_grad(
        op, inputs, set(["X", "Y"]), "Out", max_relative_error=0.5)

def test_check_grad(self):
    op = create_op("onehot_cross_entropy")
    batch_size = 30
    class_num = 10
    inputs = {
        "X": numpy.random.uniform(
            0.1, 1.0, [batch_size, class_num]).astype("float32"),
        # use floor division so the labels stay integral under Python 3
        "label": (class_num // 2) * numpy.ones(batch_size).astype("int32")
    }
    self.check_grad(op, inputs, set("X"), "Y")

def test_scatter_grad(self):
    op = create_op("scatter")
    # test data setup
    ref_np = numpy.ones((3, 10)).astype("float32")
    index_np = numpy.array([1, 2]).astype("int32")
    updates_np = numpy.random.random((2, 10)).astype("float32")
    output_np = numpy.copy(ref_np)
    output_np[index_np] += updates_np
    inputs = {'Ref': ref_np, 'Index': index_np, 'Updates': updates_np}
    self.check_grad(
        op, inputs, set(["Updates", "Ref"]), "Out", in_place=True)

def test_softmax(self):
    op = create_op("softmax")
    inputs = {"X": np.random.uniform(0.1, 1, [10, 10]).astype("float32")}
    self.check_grad(op, inputs, set("X"), "Y")

def test_gather_grad(self):
    op = create_op("gather")
    xnp = numpy.random.random((10, 20)).astype("float32")
    inputs = {'X': xnp, 'Index': numpy.array([1, 3, 5]).astype("int32")}
    self.check_grad(op, inputs, set("X"), "Out")

def test_normal(self):
    op = create_op("mean")
    inputs = {"X": np.random.random((10, 10)).astype("float32")}
    self.check_grad(op, inputs, set("X"), "Out")
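
# For reference, a minimal sketch of the idea behind check_grad: estimate
# each gradient entry by central finite differences and compare it against
# the analytic gradient. This is NOT the framework's actual implementation;
# numeric_gradient below is a hypothetical standalone helper written only
# to illustrate the technique the tests above rely on.
import numpy as np


def numeric_gradient(f, x, delta=1e-3):
    # f: callable mapping the ndarray x to a scalar loss
    # returns d f / d x, estimated element-wise by central differences
    grad = np.zeros_like(x)
    flat_x = x.reshape(-1)  # view: writes here mutate x in place
    flat_g = grad.reshape(-1)
    for i in range(flat_x.size):
        orig = flat_x[i]
        flat_x[i] = orig + delta
        loss_plus = f(x)
        flat_x[i] = orig - delta
        loss_minus = f(x)
        flat_x[i] = orig  # restore the perturbed element
        flat_g[i] = (loss_plus - loss_minus) / (2 * delta)
    return grad


# usage sketch: the mean op's analytic gradient is 1/N everywhere,
# so the numeric estimate should match it within the difference error
x = np.random.random((10, 10)).astype("float64")
numeric = numeric_gradient(lambda v: v.mean(), x)
analytic = np.full_like(x, 1.0 / x.size)
assert np.allclose(numeric, analytic, atol=1e-5)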