Example #1
File: utils.py  Project: zouzias/tangent
def tangent_func():
    # `func`, `args`, `wrt` and `preserve_result` are closure variables
    # provided by the enclosing test helper in utils.py.
    func.__globals__['np'] = np
    df = tangent.autodiff(func,
                          mode='forward',
                          preserve_result=preserve_result,
                          wrt=wrt,
                          optimized=True,
                          verbose=1)
    args_ = args + (1.0,)  # seed gradient for the forward pass
    return df(*deepcopy(args_))
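
For reference, a self-contained version of the same forward-mode call, with a simple quadratic standing in for the closure's func (the function f below is illustrative, not part of the original test suite):

import numpy as np
import tangent

def f(x):
    return x * x

# Forward mode: the generated derivative takes the original arguments plus a
# trailing seed, mirroring `args_ = args + (1.0,)` above.
df = tangent.autodiff(f, mode='forward', wrt=(0,), optimized=True)
print(df(3.0, 1.0))  # d(x*x)/dx at x = 3.0 -> 6.0
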
Example #2
def _test_tf_hvp(func, optimized):
  a = tf.random_normal(shape=(300,))
  v = tf.reshape(a, shape=(-1,))

  modes = ['forward', 'reverse']
  for mode1 in modes:
    for mode2 in modes:
      if mode1 == mode2 == 'forward':
        continue
      df = tangent.autodiff(
          func,
          mode=mode1,
          motion='joint',
          optimized=optimized,
          check_dims=False)
      ddf = tangent.autodiff(
          df, mode=mode2, motion='joint', optimized=optimized, check_dims=False)
      dx = ddf(a, tf.constant(1.0), v)
      # We just ensure it computes something in this case.
      assert dx.shape == a.shape
Example #3
def _test_hvp(func, optimized):
  np.random.seed(0)
  a = np.random.normal(scale=1, size=(300,)).astype('float32')
  v = a.ravel()

  modes = ['forward', 'reverse']
  for mode1 in modes:
    for mode2 in modes:
      if mode1 == mode2 == 'forward':
        continue
      df = tangent.autodiff(
          func,
          mode=mode1,
          motion='joint',
          optimized=optimized,
          check_dims=False)
      ddf = tangent.autodiff(
          df, mode=mode2, motion='joint', optimized=optimized, check_dims=False)
      dx = ddf(a, 1, v)
      hvp_ag = hessian_vector_product(func)
      dx_ag = hvp_ag(a, v)
      assert np.allclose(dx, dx_ag)
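
The nested autodiff calls above compose two derivative passes into a Hessian-vector product. A minimal self-contained sketch of the same call pattern, using an illustrative cubic in place of func (seeds are passed exactly as in dx = ddf(a, 1, v) above):

import numpy as np
import tangent

def f(x):
    return np.sum(x * x * x)

df = tangent.autodiff(f, mode='reverse', motion='joint', optimized=True, check_dims=False)
ddf = tangent.autodiff(df, mode='forward', motion='joint', optimized=True, check_dims=False)

x = np.random.normal(size=(5,)).astype('float32')
v = x.ravel()
hvp = ddf(x, 1, v)  # same argument order as ddf(a, 1, v) above
assert hvp.shape == x.shape  # for this cubic, hvp should approximate 6 * x * v
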
Example #4
def test_nested_dict(motion, optimized):
  p = dict(i=dict(j=3.0, k=4.0))
  func = nested_dict
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(p)

  df_ag = ag_grad(func)
  dx_ag = df_ag(p)
  for k in p['i']:
    assert np.allclose(dx['i'][k], dx_ag['i'][k])
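
nested_dict itself is defined in the test suite's shared functions module; a plausible stand-in with the same nested access pattern (illustrative only, not the actual definition) would be:

def nested_dict(p):
    # Illustrative stand-in: reads the same nested keys that the test checks.
    return p['i']['j'] * p['i']['k']
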
Example #5
File: utils.py  Project: zouzias/tangent
def tangent_func():
    # `func`, `args`, `motion`, `optimized` and `preserve_result` are closure
    # variables provided by the enclosing test helper in utils.py.
    y = func(*deepcopy(args))
    if np.array(y).size > 1:
        init_grad = np.ones_like(y)
    else:
        init_grad = 1
    func.__globals__['np'] = np
    df = tangent.autodiff(func,
                          mode='reverse',
                          motion=motion,
                          optimized=optimized,
                          preserve_result=preserve_result,
                          verbose=1)
    if motion == 'joint':
        return df(*deepcopy(args) + (init_grad,))
    return df(*deepcopy(args), init_grad=init_grad)
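
A self-contained sketch of the 'joint' branch above, with a sum-of-squares standing in for func (illustrative only); the seed gradient is passed as the trailing positional argument:

import numpy as np
import tangent

def f(x):
    return np.sum(x * x)

df = tangent.autodiff(f, mode='reverse', motion='joint', optimized=True)
x = np.array([1.0, 2.0, 3.0])
init_grad = 1.0  # scalar output, so the seed is a scalar
print(df(x, init_grad))  # gradient of sum(x**2) -> approximately [2., 4., 6.]
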
Example #6
def test_unpacking_args_saxpy(motion, optimized, a, b, c):
  func = unpacking_args_saxpy
  func = tangent.tangent(func)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df((a, b, c))

  df_num = utils.numeric_grad(func)
  dx_num = df_num((a, b, c))
  assert np.allclose(dx, dx_num)
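
unpacking_args_saxpy comes from the test suite's shared functions module; a plausible stand-in matching the df((a, b, c)) call above (illustrative only, not the actual definition):

def unpacking_args_saxpy(abc):
    # Illustrative stand-in: saxpy-style a * b + c with tuple unpacking.
    a, b, c = abc
    return a * b + c
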
Example #7
def test_inlining_contextmanager(motion, optimized, a):
  func = inlining_contextmanager
  func = tangent.tangent(func)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(a)

  func.__globals__['np'] = ag_np
  df_ag = ag_grad(func)
  df_ag(a)
  assert np.allclose(dx, 2.9 * a**2)
Example #8
def test_dict_saxpy(motion, optimized, a, b, c):
  func = dict_saxpy
  func = tangent.tangent(func)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(dict(a=a, b=b, c=c))

  df_num = utils.numeric_grad(func)
  dx_num = df_num(dict(a=float(a), b=float(b), c=float(c)))
  flat_dx, _ = flatten(dx)
  flat_dx_num, _ = flatten(dx_num)
  assert np.allclose(flat_dx, flat_dx_num)
Example #9
def make_M(F, B, NV):
    """ Returns a function that calculates the system Jacobian, given the flux
        function, and the non-conservative matrix (if necessary)
    """
    dFdQ = autodiff(F)

    def M(Q, d, pars=None):
        """ Returns the system Jacobian in direction d, given state Q
        """
        ret = zeros([NV, NV])
        for i in range(NV):
            x = zeros(NV)
            x[i] = 1
            ret[i] = dFdQ(Q, d, pars, x)
        if B is not None:
            ret += B(Q, d, pars)
        return ret

    return M
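
A hypothetical usage sketch, assuming the module's own zeros/autodiff imports (presumably numpy and tangent) and a toy flux F(Q) = 0.5 * Q**2, whose Jacobian is diag(Q); all names below are illustrative:

import numpy as np

NV = 3

def F(Q, d, pars):
    # Toy flux, independent of the direction d and the parameters pars.
    return 0.5 * Q * Q

M = make_M(F, None, NV)
Q0 = np.array([1.0, 2.0, 3.0])
print(M(Q0, 0))  # expected to be close to np.diag(Q0)
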
Example #10
def test_rnn(motion, optimized):
  func = rnn
  w = np.random.randn(2, 3)
  inputs = np.random.randn(3, 2)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      wrt=(0, 1),
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dinputs, dw = df(inputs, w)

  num_dinputs = utils.numeric_grad(func)(inputs, w)
  num_dw = utils.numeric_grad(lambda w, x: func(x, w))(w, inputs)
  assert np.allclose(num_dw, dw)
  assert np.allclose(num_dinputs, dinputs)
Example #11
def test_bilinear(optimized):
  func = bilinear
  D = 3
  np.random.seed(0)
  x = np.random.randn(1, D)
  h = np.random.randn(1, D)
  U = np.random.randn(D, D)
  w = np.random.randn(D, D)
  b = np.random.randn(1, D)

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      wrt=(0,),
      motion='joint',
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dx = df(x, h, U, w, b)

  num_dx = utils.numeric_grad(func)(x, h, U, w, b)
  assert np.allclose(num_dx, dx)
Example #12
def test_logistic_regression(motion, optimized):
  func = logistic_regression
  w = np.random.randn(3, 5)
  b = np.random.randn(5)
  input_ = np.random.rand(3)
  label = np.zeros(5)
  label[1] = 1

  func.__globals__['np'] = np
  df = tangent.autodiff(
      func,
      wrt=(2, 3),
      motion=motion,
      optimized=optimized,
      verbose=True,
      input_derivative=INPUT_DERIVATIVE.DefaultOne)
  dw, db = df(input_, label, w, b)

  func.__globals__['np'] = ag_np
  ag_dw = ag_grad(func, argnum=2)(input_, label, w, b)
  ag_db = ag_grad(func, argnum=3)(input_, label, w, b)
  assert np.allclose(ag_dw, dw)
  assert np.allclose(ag_db, db)
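
logistic_regression is defined in the test suite's shared functions module; a plausible softmax cross-entropy stand-in (illustrative only, not the actual definition) shows why wrt=(2, 3) yields the weight and bias gradients:

def logistic_regression(inputs, label, w, b):
    # Illustrative stand-in: linear logits followed by softmax cross-entropy.
    logits = np.dot(inputs, w) + b
    p = np.exp(logits) / np.sum(np.exp(logits))
    return -np.sum(label * np.log(p))
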
Example #13
File: utils.py  Project: zouzias/tangent
def assert_forward_not_implemented(func, wrt):
    try:
        tangent.autodiff(func, mode='forward', preserve_result=False, wrt=wrt)
        assert False, 'Remove this when implementing.'
    except NotImplementedError:
        pass