Example #1
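This unit test checks AffinePreconditioner.adjust_gradient, which maps a gradient with respect to the preconditioned data back to a gradient with respect to the original data. The analytic gradient is compared entry-wise against a central-difference approximation.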
import unittest

from numpy import abs, dot, hstack, max, ones_like, sum, zeros_like
from numpy.random import randn

# the import path below is assumed; AffinePreconditioner ships with the cmt package
from cmt.transforms import AffinePreconditioner


class AffinePreconditionerTest(unittest.TestCase):  # scaffolding added so the method runs standalone
    def test_adjust_gradient(self):
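        # synthetic data: X holds 1000 five-dimensional inputs, Y holds 1000
        # two-dimensional outputs that depend linearly on X plus noise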
        X = dot(randn(5, 5), randn(5, 1000)) + randn(5, 1)
        Y = dot(randn(2, 2), randn(2, 1000)) + dot(randn(2, 5), X)

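        # random parameters of the affine preconditioner: input/output means,
        # input/output preconditioning matrices, and a predictor from inputs to outputs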
        meanIn = randn(5, 1)
        meanOut = randn(2, 1)
        preIn = randn(5, 5)
        preOut = randn(2, 2)
        predictor = randn(2, 5)

        pre = AffinePreconditioner(meanIn, meanOut, preIn, preOut, predictor)

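        # f sums every entry of the preconditioned data, so its gradient with
        # respect to the preconditioned (X, Y) is all ones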
        f = lambda X, Y: sum(hstack([p.ravel() for p in pre(X, Y)]))

        # compute analytic gradient by mapping the all-ones gradient back
        # through the preconditioner (dfdY is returned but not checked here)
        dfdX, dfdY = pre.adjust_gradient(ones_like(X), ones_like(Y))

        # compute numerical gradient
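        # the preconditioner is affine, so the central difference is exact up to
        # rounding; this is why a large h works with such a tight tolerance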
        h = 0.1
        dfdXn = zeros_like(X)
        X_copy = X.copy()
        for i in range(X.shape[0]):
            for j in range(X.shape[1]):
                X_copy[i, j] = X[i, j] + h
                fp = f(X_copy, Y)

                X_copy[i, j] = X[i, j] - h
                fn = f(X_copy, Y)

                dfdXn[i, j] = (fp - fn) / (2. * h)

                X_copy[i, j] = X[i, j]

        self.assertLess(max(abs(dfdXn - dfdX)), 1e-7)
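
The loop above is a standard central-difference gradient check and can be
factored into a reusable helper. A minimal sketch follows; the name
numerical_gradient and its signature are illustrative, not part of cmt:

from numpy import ndindex, zeros_like

def numerical_gradient(f, X, h=1e-1):
    # central-difference approximation of the gradient of a scalar function f
    grad = zeros_like(X)
    X_pert = X.copy()
    for idx in ndindex(*X.shape):
        X_pert[idx] = X[idx] + h
        fp = f(X_pert)
        X_pert[idx] = X[idx] - h
        fn = f(X_pert)
        grad[idx] = (fp - fn) / (2. * h)
        X_pert[idx] = X[idx]  # restore the perturbed entry
    return grad

# usage, equivalent to the loop in the test:
# dfdXn = numerical_gradient(lambda X_: f(X_, Y), X)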