import numpy as np

# NOTE: Double, Integer, DoubleTensor, ops, and GradGraph come from the
# autodiff package under test; import them from wherever the project
# defines them (the module path is project-specific and not shown here).


def activ_fns():
    # sigmoid saturates for large inputs: d/dx sigmoid(x) = s * (1 - s), and
    # in double precision 1 / (1 + exp(-110.5)) rounds to exactly 1.0, so the
    # expected gradient is exactly 0.
    x = Double('x')
    z = ops.sigmoid(x)
    graph = GradGraph(z)
    graph.getOutput({x: 110.5})
    graph.getGradients(wrt=x)
    return x.gradient == 0

def testOps():
    # exp(log(x)) is the identity, so dz/dx = 1 wherever log is defined.
    x = Integer('x')
    y = ops.log(x)
    z = ops.exp(y)
    graph = GradGraph(z)
    graph.getOutput({x: 1})
    graph.getGradients(wrt=x)
    return x.gradient == 1

def gradTestSimple():
    # e = (a + b) * (b + 1), so de/db = (b + 1) + (a + b); at a=2, b=1 that
    # is 2 + 3 = 5.
    a = Integer("a")
    b = Integer("b")
    e = (a + b) * (b + 1)
    graph = GradGraph(e)
    graph.getOutput({a: 2, b: 1})
    graph.getGradients(wrt=b)
    return b.gradient == 5

def dotProduct():
    # z_i = (x . [3, 4]) * [4, 5]_i; with x = [3, 4] the inner product is 25,
    # so the output is [100, 125] and the Jacobian is dz_i/dx_j = [4, 5]_i * [3, 4]_j.
    x = DoubleTensor("Tensor1")
    y = x.dot([3, 4])
    z = y.dot([4, 5])
    graph = GradGraph(z)
    output = graph.getOutput({x: [3, 4]})
    graph.getGradients(wrt=x)
    flag1 = np.all(output == [100, 125])
    flag2 = np.all(x.gradient == [[12., 16.], [15., 20.]])
    return flag1 and flag2
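
# Standalone NumPy cross-check of the Jacobian expected in dotProduct above
# (an illustrative helper, not part of the library under test): since
# z_i = (x . w1) * w2_i, the Jacobian dz_i/dx_j = w2_i * w1_j is the outer
# product of w2 and w1.
def _dot_jacobian_check():
    w1, w2 = np.array([3.0, 4.0]), np.array([4.0, 5.0])
    return np.array_equal(np.outer(w2, w1), np.array([[12., 16.], [15., 20.]]))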

def TensorOp():
    # x is fed the length-1 array [10], which broadcasts against [3, 4]:
    # z = log((x - c) * x) with c = [3, 4], so dz/dx = (2x - c) / ((x - c) * x),
    # and (x - c) * x can be recovered from the output as exp(output).
    x = DoubleTensor("Tensor1")
    y = x - [3, 4]
    z = ops.log(y * x)
    graph = GradGraph(z)
    output = graph.getOutput({x: [10]})
    assert np.all(np.isclose(output, np.log(10 * (10 - np.asarray([3, 4])))))
    graph.getGradients(wrt=x)
    a = 2 * 10 - np.asarray([3, 4])       # numerator: d/dx of (x**2 - c*x)
    b = 1.0 / np.exp(np.asarray(output))  # denominator: 1 / ((x - c) * x)
    return np.all(np.isclose(x.gradient, a * b))
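
# Finite-difference cross-check of the analytic gradient used in TensorOp
# above (an illustrative NumPy sketch; "_tensorop_fd_check" is not part of
# the suite): central differences should agree with (2x - c) / ((x - c) * x).
def _tensorop_fd_check(x0=10.0, eps=1e-6):
    c = np.asarray([3.0, 4.0])
    f = lambda x: np.log((x - c) * x)  # same function TensorOp builds
    fd_grad = (f(x0 + eps) - f(x0 - eps)) / (2 * eps)
    analytic = (2 * x0 - c) / ((x0 - c) * x0)
    return np.all(np.isclose(fd_grad, analytic))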

def gradTestLong():
    # n = (p*z + y*p*z) * z = p * z**2 * (1 + y), with the subexpression p*z
    # built twice so the graph contains duplicate branches. This test only
    # verifies that the backward pass completes without raising; x is bound
    # but unused in the graph.
    x = Integer("Int1x")
    y = Integer("Int2y")
    z = Integer("Int3z")
    p = Integer("Int4p")
    k = p * z
    n = (k + (y * p * z)) * z
    graph = GradGraph(n)
    graph.getOutput({x: 9, y: 9, z: 9, p: 2})
    graph.getGradients(wrt=z)
    return True

def gradTestShort():
    # Same expression as gradTestLong, built one node at a time:
    # n = (p*z + y*(p*z)) * z = p * z**2 * (1 + y), so
    # dn/dz = 2*p*z*(1 + y) = 2 * 2 * 9 * 10 = 360 at z=9, p=2, y=9.
    x = Integer("Int1x")
    y = Integer("Int2y")
    z = Integer("Int3z")
    p = Integer("Int4p")
    k = p * z
    t = y * k
    m = k + t
    n = m * z
    graph = GradGraph(n)
    graph.getOutput({x: 9, y: 9, z: 9, p: 2})
    graph.getGradients(wrt=z)
    return z.gradient == 360
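
# Hypothetical driver (an assumption, not part of the original suite): run
# every check above and report PASS/FAIL so the file can be executed directly.
if __name__ == "__main__":
    tests = [activ_fns, testOps, gradTestSimple, dotProduct,
             TensorOp, gradTestLong, gradTestShort]
    for test in tests:
        print(f"{test.__name__}: {'PASS' if test() else 'FAIL'}")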