Example #1
    def test_hessian_nodependence(self):
        X = T.matrix()
        cost = T.sum(X)  # linear in X, so the Hessian graph will not depend on X

        with warnings.catch_warnings(record=True) as w:
            # The following should emit a warning
            hess = tf.hessian(cost, X)

            assert len(w) == 1
            assert "unused input" in str(w[-1].message)
Example #2
    def test_hessian_no_Rop(self):
        # Break the Rop in T.exp
        def new_Rop(x, y):
            raise NotImplementedError

        T.exp.R_op = new_Rop

        # Rebuild graph to force recompile
        X = T.vector()
        cost = T.exp(T.sum(X**2))

        # And check that all is still well
        hess = tf.hessian(cost, X)

        np_testing.assert_allclose(self.correct_hess, hess(self.Y, self.A))
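With T.exp.R_op broken, the Hessian helper must get by without the R-operator. One standard workaround, sketched here under names of my own choosing and not claimed to be pymanopt's actual fallback, is the double-backward trick: differentiate the inner product of the gradient with a constant direction vector a second time:

import numpy as np
import theano
import theano.tensor as T

X = T.vector()
A = T.vector()
cost = T.exp(T.sum(X ** 2))

g = T.grad(cost, X)
# d/dX of <g, A> with A held constant is the Hessian-vector product H.dot(A);
# no R_op is ever invoked.
hvp = T.grad(T.sum(g * A), X, consider_constant=[A])
hess = theano.function([X, A], hvp)

x0 = np.random.randn(3).astype(theano.config.floatX)
a0 = np.random.randn(3).astype(theano.config.floatX)
# Analytic check: H = exp(sum(x**2)) * (2*I + 4*outer(x, x))
H = np.exp((x0 ** 2).sum()) * (2 * np.eye(3) + 4 * np.outer(x0, x0))
print(np.allclose(hess(x0, a0), H.dot(a0)))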
Example #3
def compile(problem, need_grad, need_hess):
    # Conditionally load autodiff backend if needed
    if (problem.cost is None or
       (need_grad and problem.grad is None and problem.egrad is None) or
       (need_hess and problem.hess is None and problem.ehess is None)):
        if type(problem.ad_cost).__name__ == 'TensorVariable':
            from pymanopt.tools.autodiff import _theano as ad
        elif type(problem.ad_cost).__name__ == 'function':
            from pymanopt.tools.autodiff import _autograd as ad
        else:
            warn('Cannot identify autodiff backend from '
                 'ad_cost variable type.')

    if problem.cost is None:
        problem.cost = ad.compile(problem.ad_cost, problem.ad_arg)

    if need_grad and problem.egrad is None and problem.grad is None:
        problem.egrad = ad.gradient(problem.ad_cost, problem.ad_arg)
        # Assume that if the Hessian is needed, the gradient is as well
        if need_hess and problem.ehess is None and problem.hess is None:
            problem.ehess = ad.hessian(problem.ad_cost, problem.ad_arg)
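The dispatch above keys on type(problem.ad_cost).__name__. A quick illustration of the two type names it checks for (variable names here are mine):

import theano.tensor as T

X = T.matrix()
theano_cost = T.sum(X ** 2)
print(type(theano_cost).__name__)   # 'TensorVariable' -> the _theano backend

def autograd_cost(x):
    return (x ** 2).sum()

print(type(autograd_cost).__name__)  # 'function' -> the _autograd backend

Note that if neither name matches, the code above only warns; ad is then left unbound and the subsequent ad.compile call would raise a NameError.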
Example #4
    def test_hessian(self):
        hess = tf.hessian(self.cost, self.X)

        # Now test hess
        np_testing.assert_allclose(self.correct_hess, hess(self.Y, self.A))
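As the call hess(self.Y, self.A) suggests, the compiled Hessian is a callable taking a point and a direction and returning a Hessian-vector product. A minimal self-contained sketch of the same construction in plain Theano (the quadratic cost and all names are illustrative, not the fixtures from this test class):

import numpy as np
import theano
import theano.tensor as T

X = T.vector()
A = T.vector()
cost = T.sum(X ** 2)

# Apply the R-operator to the gradient graph to get the Hessian-vector product.
hvp = T.Rop(T.grad(cost, X), X, A)
hess = theano.function([X, A], hvp)

x0 = np.random.randn(5).astype(theano.config.floatX)
a0 = np.random.randn(5).astype(theano.config.floatX)
print(np.allclose(hess(x0, a0), 2 * a0))  # the Hessian of sum(x**2) is 2*I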