Example #1
def reference_func():
    func.__globals__['np'] = ag_np
    if preserve_result:
        val, gradval = ag_value_and_grad(func)(*deepcopy(args))
        return gradval, val
    else:
        return ag_grad(func)(*deepcopy(args))
Example #2
def reference_func():
    func.__globals__['np'] = ag_np
    if preserve_result:
        # Note: ag_value_and_grad returns (val, grad) but we need (grad, val)
        val, gradval = ag_value_and_grad(func)(*deepcopy(args))
        return gradval, val
    else:
        return ag_grad(func)(*deepcopy(args))
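Examples #1 and #2 are fragments of a larger test helper: func, args, preserve_result, ag_np, ag_grad, and ag_value_and_grad are all defined in the enclosing module. A minimal self-contained sketch of that context, assuming the ag_* names are the usual autograd aliases (the make_reference_func wrapper is illustrative, not part of the original):

from copy import deepcopy

import autograd.numpy as ag_np
from autograd import grad as ag_grad, value_and_grad as ag_value_and_grad


def make_reference_func(func, args, preserve_result=False):
    """Illustrative wrapper that rebuilds the closure shown above."""
    def reference_func():
        # Swap autograd's numpy into the target function's globals so it can be traced.
        func.__globals__['np'] = ag_np
        if preserve_result:
            # ag_value_and_grad returns (value, gradient); callers expect (grad, value).
            val, gradval = ag_value_and_grad(func)(*deepcopy(args))
            return gradval, val
        return ag_grad(func)(*deepcopy(args))
    return reference_func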
Example #3
def test_nested_dict(motion, optimized):
    p = dict(i=dict(j=3.0, k=4.0))
    func = nested_dict
    df = tangent.grad(func, motion=motion, optimized=optimized, verbose=True)
    dx = df(p)

    df_ag = ag_grad(func)
    dx_ag = df_ag(p)
    for k in p['i']:
        assert np.allclose(dx['i'][k], dx_ag['i'][k])
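The nested_dict fixture used here (and in Example #6 below) is not reproduced on this page. A plausible stand-in, an assumption rather than the actual fixture, showing the pattern the test checks against; autograd's grad mirrors dict-structured inputs in its output:

import autograd.numpy as np
from autograd import grad as ag_grad


def nested_dict(p):
    # Scalar function of a nested dict, matching p = dict(i=dict(j=3.0, k=4.0)).
    return p['i']['j'] * p['i']['k'] + np.sin(p['i']['j'])


dx_ag = ag_grad(nested_dict)(dict(i=dict(j=3.0, k=4.0)))
# dx_ag has the same structure as the input:
# dx_ag['i']['j'] == 4.0 + cos(3.0), dx_ag['i']['k'] == 3.0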
Example #4
def test_logistic_regression(motion, optimized):
    func = logistic_regression
    w = np.random.randn(3, 5)
    b = np.random.randn(5)
    input_ = np.random.rand(3)
    label = np.zeros(5)
    label[1] = 1

    func.__globals__['np'] = np
    df = tangent.grad(func,
                      wrt=(2, 3),
                      motion=motion,
                      optimized=optimized,
                      verbose=True)
    dw, db = df(input_, label, w, b)

    func.__globals__['np'] = ag_np
    ag_dw = ag_grad(func, argnum=2)(input_, label, w, b)
    ag_db = ag_grad(func, argnum=3)(input_, label, w, b)
    assert np.allclose(ag_dw, dw)
    assert np.allclose(ag_db, db)
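The logistic_regression fixture is likewise defined elsewhere in the test module. A self-contained sketch consistent with the shapes above (w: 3x5, b: 5, one-hot label of length 5), assuming a standard softmax cross-entropy loss; the real fixture may differ:

import autograd.numpy as ag_np
from autograd import grad as ag_grad


def logistic_regression(input_, label, w, b):
    logits = ag_np.dot(input_, w) + b
    # Numerically stable log-softmax, then cross-entropy against the one-hot label.
    logits = logits - ag_np.max(logits)
    log_probs = logits - ag_np.log(ag_np.sum(ag_np.exp(logits)))
    return -ag_np.sum(label * log_probs)


# Reference gradients w.r.t. w (argnum=2) and b (argnum=3), as in the test above:
# ag_dw = ag_grad(logistic_regression, argnum=2)(input_, label, w, b)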
Example #5
def test_inlining_contextmanager(motion, optimized, a):
    func = inlining_contextmanager
    func = tangent.tangent(func)

    func.__globals__['np'] = np
    df = tangent.grad(func, motion=motion, optimized=optimized, verbose=True)
    dx = df(a)

    func.__globals__['np'] = ag_np
    df_ag = ag_grad(func)
    df_ag(a)
    assert np.allclose(dx, 2.9 * a**2)
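The inlining_contextmanager fixture (used here and in Example #7) is also not shown; the assertion implies its derivative is 2.9 * a**2, i.e. the function behaves like (2.9 / 3) * a**3 up to a constant. A hypothetical stand-in built around a context manager, consistent with that assertion but not taken from the source:

import contextlib


@contextlib.contextmanager
def scaled(x, factor):
    yield x * factor


def inlining_contextmanager(a):
    # Derivative w.r.t. a is 2.9 * a**2, matching the assertion above.
    with scaled(a, 2.9) as b:
        return b * a * a / 3.0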
Example #6
def test_nested_dict(motion, optimized):
  p = dict(i=dict(j=3.0, k=4.0))
  func = nested_dict
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(p)

  df_ag = ag_grad(func)
  dx_ag = df_ag(p)
  for k in p['i']:
    assert np.allclose(dx['i'][k], dx_ag['i'][k])
Example #7
def test_inlining_contextmanager(motion, optimized, a):
  func = inlining_contextmanager
  func = tangent.tangent(func)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(a)

  func.__globals__['np'] = ag_np
  df_ag = ag_grad(func)
  df_ag(a)
  assert np.allclose(dx, 2.9 * a**2)
Example #8
def _setdiff(self):
    self._subgradient = ag_grad(self.eval)
    self._jacobian = ag_jacobian(self.eval)
    self._hessian = ag_hessian(self.eval)
Example #9
def _setbatchdiff(self):
    self._batch_subgradient = ag_grad(self._batch_eval)
    self._batch_jacobian = ag_jacobian(self._batch_eval)
    self._batch_hessian = ag_hessian(self._batch_eval)
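Examples #8 and #9 cache autograd's first- and second-order operators as attributes on a class. A minimal runnable sketch of how such a class might be wired up, assuming ag_grad, ag_jacobian, and ag_hessian are autograd.grad, autograd.jacobian, and autograd.hessian, and that eval maps an array to a scalar (the Quadratic class is illustrative):

import autograd.numpy as anp
from autograd import grad as ag_grad, jacobian as ag_jacobian, hessian as ag_hessian


class Quadratic:
    def eval(self, x):
        # Scalar-valued, so gradient, Jacobian, and Hessian are all defined.
        return anp.dot(x, x)

    def _setdiff(self):
        self._subgradient = ag_grad(self.eval)
        self._jacobian = ag_jacobian(self.eval)
        self._hessian = ag_hessian(self.eval)


q = Quadratic()
q._setdiff()
q._subgradient(anp.array([1.0, 2.0]))  # -> array([2., 4.])
q._hessian(anp.array([1.0, 2.0]))      # -> 2 * identity(2)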
Example #10
def reference_func():
    func.__globals__['np'] = ag_np
    return ag_grad(ag_grad(func))(*args)
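Example #10 builds a second-derivative reference by nesting ag_grad. A minimal runnable illustration of the same pattern with an explicit function (the cube function is illustrative):

from autograd import grad as ag_grad


def cube(x):
    return x ** 3


d2 = ag_grad(ag_grad(cube))  # second derivative of x**3 is 6 * x
d2(2.0)                      # -> 12.0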