Example #1
    def testGradOfGrad(self):
        def square(x):
            return math_ops.multiply(x, x)

        grad = tfe.gradients_function(square)
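        # Composing gradients_function with itself yields the second derivative
        # of x*x, which is 2 for every x.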
        gradgrad = tfe.gradients_function(lambda x: grad(x)[0])
        self.assertEquals([2], [x.numpy() for x in gradgrad(3)])
Example #2
  def testGradOfGrad(self):

    def square(x):
      return math_ops.multiply(x, x)

    grad = tfe.gradients_function(square)
    gradgrad = tfe.gradients_function(lambda x: grad(x)[0])
    self.assertEquals([2], [x.numpy() for x in gradgrad(3.)])
Example #3
  def testGradients(self):

    def square(x):
      return math_ops.multiply(x, x)

    grad = tfe.gradients_function(square)
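    # d(x*x)/dx evaluated at x = 3 is 2*3 = 6.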
    self.assertEquals([6], [x.numpy() for x in grad(3.)])
Example #4
  def testGradients(self):

    def square(x):
      return math_ops.multiply(x, x)

    grad = tfe.gradients_function(square)
    self.assertEquals([6], [x.numpy() for x in grad(3)])
Example #5
 def reference_func():
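   # Compute the gradients of func with respect to the parameters listed in wrt
   # and convert them to NumPy; optionally return the value of func as well.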
   gradval = tensors_to_numpy(tfe.gradients_function(func, params=wrt)(*args))
   if preserve_result:
     val = tensors_to_numpy(func(*args))
     if isinstance(gradval, tuple):
       return gradval + (val,)
     return gradval, val
   else:
     return gradval
Example #6
    def testCustomGrad(self):
        @tfe.custom_gradient
        def f(x):
            y = math_ops.multiply(x, x)

            def grad_fn(_):
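                # Ignore the upstream gradient (the underscore) and return
                # x + y = x + x*x, so at x = 3 the reported gradient is 3 + 9 = 12.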
                return [x + y]

            return y, grad_fn

        grad = tfe.gradients_function(f)
        self.assertEquals([12], [x.numpy() for x in grad(3)])
Example #7
  def testCustomGrad(self):

    @tfe.custom_gradient
    def f(x):
      y = math_ops.multiply(x, x)

      def grad_fn(_):
        return [x + y]

      return y, grad_fn

    grad = tfe.gradients_function(f)
    self.assertEquals([12], [x.numpy() for x in grad(3.)])
Example #8
    def testCustomGrad(self):
        @tfe.custom_gradient
        def f(x):
            y = math_ops.multiply(x, x)

            def grad_fn(_):
                return [x + y]

            return y, grad_fn

        # TODO(ashankar): This [0] should ideally not be needed.
        grad = tfe.gradients_function(f, [0])
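        # The [0] indexes the positional argument to differentiate with respect to.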
        self.assertEquals([12], [x.numpy() for x in grad(3)])
Example #9
  def testCustomGrad(self):

    @tfe.custom_gradient
    def f(x):
      y = math_ops.multiply(x, x)

      def grad_fn(_):
        return [x + y]

      return y, grad_fn

    # TODO(ashankar): This [0] should ideally not be needed.
    grad = tfe.gradients_function(f, [0])
    self.assertEquals([12], [x.numpy() for x in grad(3)])
Example #10
# Module 10: New Features in TensorFlow
# Eager Execution

import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import tensorflow as tf
from tensorflow.contrib.eager.python import tfe

tfe.enable_eager_execution()
x = [[2.]]
m = tf.matmul(x, x)
print(m)


def square(x):
    return tf.multiply(x, x)


grad = tfe.gradients_function(square)
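# gradients_function returns a callable that evaluates d(square)/dx and
# returns the gradients as a list of tensors.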

print(square(3.))
print(grad(3.))

gradgrad = tfe.gradients_function(lambda x: grad(x)[0])
print(gradgrad(3.))
Example #11
    MIT License
    Copyright (c) 2018. Victor I. Afolabi. All rights reserved.
"""
import tensorflow as tf
# import tensorflow.contrib.eager as tfe
from tensorflow.contrib.eager.python import tfe
from sklearn import datasets, preprocessing, model_selection

# Enable eager mode.
# tf.enable_eager_execution()

data = datasets.load_iris()

TARGET_NAMES = {i: l for i, l in enumerate(data['target_names'])}

features = preprocessing.MinMaxScaler(feature_range=(-1, 1)).fit_transform(
    data['data'])
labels = preprocessing.OneHotEncoder(sparse=False).fit_transform(
    data['target'].reshape(-1, 1))

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    features, labels, test_size=0.1)


def square_func(W):
    return tf.square(W)


f_grad = tfe.gradients_function(square_func, params=['W'])
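# params=['W'] selects the argument named W; d(W^2)/dW = 2W, so the
# printed gradient at 0.3 is 0.6.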
print(f_grad(tf.constant(0.3)))
Example #12
 def reference_func():
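     # Nest gradients_function to take the gradient of the gradient of func.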
     dxx = tfe.gradients_function(tfe.gradients_function(func))(*args)
     return tensors_to_numpy(tuple(t.numpy() for t in dxx))
Example #13
 def reference_func():
     return tensors_to_numpy(
         tfe.gradients_function(func, params=wrt)(*args))