Example #1
    def test_apply_gradient_directory(self):
        """get_grad correctly fetches gradient Tensor from Variable"""
        w_0 = 6
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        with nn.variable_scope(self.get_scope()):
            x = nn.Input(shape=(), name='x')
            w1 = nn.make_variable(
                name='w',
                shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y1 = w1 * x
            sgd.minimize(y1, w1)
            dy1dw1_1 = nn.get_tensor('{}_grad'.format(w1.name))
            dy1dw1_2 = nn.get_grad(w1)

            self.assertIs(dy1dw1_1, dy1dw1_2)

        with nn.variable_scope('{}/2'.format(self.get_scope())):
            w2 = nn.make_variable(
                name='w',
                shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y2 = w2 * x
            sgd.minimize(y2, w2)
            dy2dw2_1 = nn.get_tensor('{}_grad'.format(w2.name))
            dy2dw2_2 = nn.get_grad(w2)

            self.assertIs(dy2dw2_1, dy2dw2_2)
Example #2
    def test_apply_gradient_directory(self):
        """get_grad correctly fetches gradient Tensor from Variable"""
        w_0 = 6
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        with nn.variable_scope(self.get_scope()):
            x = nn.Input(shape=(), name='x')
            w1 = nn.make_variable(
                name='w', shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y1 = w1 * x
            sgd.minimize(y1, w1)
            dy1dw1_1 = nn.get_tensor('{}_grad'.format(w1.name))
            dy1dw1_2 = nn.get_grad(w1)

            self.assertIs(dy1dw1_1, dy1dw1_2)

        with nn.variable_scope('{}/2'.format(self.get_scope())):
            w2 = nn.make_variable(
                name='w', shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y2 = w2 * x
            sgd.minimize(y2, w2)
            dy2dw2_1 = nn.get_tensor('{}_grad'.format(w2.name))
            dy2dw2_2 = nn.get_grad(w2)

            self.assertIs(dy2dw2_1, dy2dw2_2)
Example #3
 def test_make_variable_raises_when_creating_existing_variable(self):
     """make_variable raise when trying to create existent variable"""
     scope = self.get_scope()
     nn.make_variable(scope, shape=[3, 1])
     try:
         nn.make_variable(scope, shape=[3, 1])
     except ValueError:
         pass
     else:
         self.fail('make_variable should raise when trying '
                   'to create a variable that already exists.')
Example #4
 def test_make_variable_raises_when_creating_existing_variable(self):
     """make_variable raise when trying to create existent variable"""
     scope = self.get_scope()
     nn.make_variable(scope, shape=[3, 1])
     try:
         nn.make_variable(scope, shape=[3, 1])
     except ValueError:
         pass
     else:
         self.fail(
             'make_variable should raise when trying '
             'to create a variable that already exists.'
         )
Example #5
    def test_clip_gradients(self):
        """Gradients are clipped"""
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        shape = (32, 1)
        with nn.variable_scope(self.get_scope()):
            initializer = nn.fetch_initializer(
                'UniformInitializer')(min_value=-3, max_value=3)
            x = nn.make_variable(
                name='x', shape=shape, initializer=initializer)
            y = nn.ops.reduce_sum(x * x / 2)
            grads_and_vars = [
                (nn.ops.clip_by_value(grad, max_value=1, min_value=-1), var)
                for grad, var in nn.ops.compute_gradient(loss=y, wrt=x)
            ]
            op = sgd.apply_gradients(grads_and_vars)

        session = nn.Session()
        session.initialize()

        val_0 = session.run(outputs=x)
        session.run(updates=op)
        val_1_be = session.run(outputs=x)

        val_1_np = np.zeros(shape)
        val_1_np[val_0 > 1] = val_0[val_0 > 1] - 1
        val_1_np[val_0 < -1] = val_0[val_0 < -1] + 1
        np.testing.assert_almost_equal(val_1_be, val_1_np)
Example #6
def create_constant_variable(shape, dtype, value=7, name='constant_variable'):
    """Create Variable for test"""
    return nn.make_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        initializer=nn.initializer.ConstantInitializer(value))
Example #7
    def test_clip_gradients(self):
        """Gradients are clipped"""
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        shape = (32, 1)
        with nn.variable_scope(self.get_scope()):
            initializer = nn.fetch_initializer('UniformInitializer')(
                min_value=-3, max_value=3)
            x = nn.make_variable(name='x',
                                 shape=shape,
                                 initializer=initializer)
            y = nn.ops.reduce_sum(x * x / 2)
            grads_and_vars = [
                (nn.ops.clip_by_value(grad, max_value=1, min_value=-1), var)
                for grad, var in nn.ops.compute_gradient(loss=y, wrt=x)
            ]
            op = sgd.apply_gradients(grads_and_vars)

        session = nn.Session()
        session.initialize()

        val_0 = session.run(outputs=x)
        session.run(updates=op)
        val_1_be = session.run(outputs=x)

        val_1_np = np.zeros(shape)
        val_1_np[val_0 > 1] = val_0[val_0 > 1] - 1
        val_1_np[val_0 < -1] = val_0[val_0 < -1] + 1
        np.testing.assert_almost_equal(val_1_be, val_1_np)
Example #8
def _get_y_equals_x_squared(scope, x_init):
    with nn.variable_scope(scope):
        x = nn.make_variable(
            name='x', shape=(), trainable=True,
            initializer=nn.initializer.ConstantInitializer(x_init))
        y = x * x
    return x, y
Example #9
def create_random_variable(
        shape, dtype, min_value=0, max_value=1, name='random_variable'):
    """Create Variable with uniform randoml values for test"""
    return nn.make_variable(
        name=name, shape=shape, dtype=dtype,
        initializer=nn.initializer.UniformInitializer(
            min_value=min_value, max_value=max_value)
    )
Example #10
 def get():
     x = nn.make_variable(name='x',
                          shape=[],
                          initializer=nn.initializer.ConstantInitializer(3))
     y = x * x
     return {
         'loss': y,
         'wrt': x,
     }
Example #11
def _get_y_equals_x_squared(scope, x_init):
    with nn.variable_scope(scope):
        x = nn.make_variable(
            name='x',
            shape=(),
            trainable=True,
            initializer=nn.initializer.ConstantInitializer(x_init))
        y = x * x
    return x, y
Example #12
 def get():
     x = nn.make_variable(
         name='x', shape=[],
         initializer=nn.initializer.ConstantInitializer(2.0))
     y = (x - 1.5) * (x - 1) * (x - 1) * (x + 1) * (x + 1) * (x + 1.5)
     return {
         'loss': y,
         'wrt': x,
     }
Example #13
 def get():
     x = nn.make_variable(
         name='x', shape=[],
         initializer=nn.initializer.ConstantInitializer(3))
     y = x * x
     return {
         'loss': y,
         'wrt': x,
     }
Example #14
 def test_get_variable_reuse_variable(self):
     """get_variable create variable"""
     scope = self.get_scope()
     var1 = nn.make_variable(scope, shape=[3, 1])
     be._set_flag(True)
     var2 = nn.get_variable(scope)
     self.assertIs(
         var1.unwrap(), var2.unwrap(),
         'Reused variable should be identical to the original variable')
Example #15
 def test_get_variable_reuse_variable(self):
     """get_variable create variable"""
     scope = self.get_scope()
     var1 = nn.make_variable(scope, shape=[3, 1])
     be._set_flag(True)
     var2 = nn.get_variable(scope)
     self.assertIs(
         var1.unwrap(), var2.unwrap(),
         'Reused variable should be identical to the original variable'
     )
Example #16
 def get():
     x = nn.make_variable(
         name='x',
         shape=[],
         initializer=nn.initializer.ConstantInitializer(2.0))
     y = (x - 1.5) * (x - 1) * (x - 1) * (x + 1) * (x + 1) * (x + 1.5)
     return {
         'loss': y,
         'wrt': x,
     }
Example #17
def create_random_variable(shape,
                           dtype,
                           min_value=0,
                           max_value=1,
                           name='random_variable'):
    """Create Variable with uniform randoml values for test"""
    return nn.make_variable(name=name,
                            shape=shape,
                            dtype=dtype,
                            initializer=nn.initializer.UniformInitializer(
                                min_value=min_value, max_value=max_value))
Example #18
 def test_compute_gradients_with_trainables(self):
     """compute_gradients computes gradients for trainable wrt"""
     with nn.variable_scope(self.get_scope()):
         xs = [nn.make_variable(
             name='x_{}'.format(i), shape=(), trainable=True,
         ) for i in range(3)]
         y = xs[0] + xs[1] + xs[2]
         grads_and_vars = nn.ops.compute_gradient(loss=y, wrt=xs)
     self.assertEqual(len(xs), len(grads_and_vars))
     for i, (grad, var) in enumerate(grads_and_vars):
         self.assertIs(xs[i], var)
         self.assertIsNotNone(grad)
Example #19
    def test_concate_2d_axis_1(self):
        """Concatenate 2 2D tensors"""
        axis, shape1, shape2 = 1, (2, 5), (2, 3)
        with nn.variable_scope(self.get_scope(), reuse=False):
            var1 = nn.make_variable(name='name1', shape=shape1)
            var2 = nn.make_variable(name='name2', shape=shape2)
            conc_var = nn.layer.Concat(axis=axis).build([var1, var2])

        session = nn.Session()
        val1, val2 = np.random.rand(*shape1), np.random.rand(*shape2)
        conc_val = session.run(outputs=conc_var, givens={
            var1: val1, var2: val2,
        })

        expected = conc_val.shape
        found = conc_var.shape
        self.assertEqual(found, expected)

        expected = np.concatenate((val1, val2), axis=axis)
        found = conc_val
        np.testing.assert_almost_equal(found, expected)
Example #20
 def test_compute_gradients(self):
     """compute_gradients returns None for non-trainable wrt"""
     with nn.variable_scope(self.get_scope()):
         xs = [nn.make_variable(
             name='x_{}'.format(i), shape=(), trainable=bool(i % 2),
         ) for i in range(5)]
         y = xs[0] + xs[1] + xs[2] + xs[3] + xs[4]
         grads_and_vars = nn.ops.compute_gradient(loss=y, wrt=xs)
     self.assertEqual(len(xs), len(grads_and_vars))
     for i, (grad, var) in enumerate(grads_and_vars):
         self.assertIs(xs[i], var)
         if i % 2:
             self.assertIsNotNone(grad)
         else:
             self.assertIsNone(grad)
Example #21
    def test_get_gradients(self):
        """gradients can be retrieved with get_tensor"""
        scope = self.get_scope()
        with nn.variable_scope(scope):
            xs = [nn.make_variable(
                name='x_{}'.format(i), shape=(), trainable=True,
            ) for i in range(5)]
            y = xs[0] + xs[1] + xs[2] + xs[3] + xs[4]
            grads_and_vars = nn.ops.compute_gradient(loss=y, wrt=xs)

            for i in range(5):
                grad = nn.get_tensor('{}_grad'.format(xs[i].name))
                self.assertIs(grads_and_vars[i][0], grad)

        for i in range(5):
            grad = nn.get_tensor('{}/{}_grad'.format(scope, xs[i].name))
            self.assertIs(grads_and_vars[i][0], grad)
Example #22
    def test_get_variable_creates_variable(self):
        """get_variable create variable"""
        scope, var_name = self.get_scope(), 'foo'
        full_name = '/'.join([scope, var_name])

        self.assertTrue(full_name not in _VARIABLES)
        with nn.variable_scope(scope, reuse=True):
            with self.assertRaises(ValueError):
                nn.get_variable(var_name)

        with nn.variable_scope(scope, reuse=False):
            variable = nn.make_variable(var_name, shape=[3, 1])
        self.assertTrue(full_name in _VARIABLES)

        self.assertIs(variable, _VARIABLES[full_name])
        with nn.variable_scope(scope, reuse=True):
            self.assertIs(variable, nn.get_variable(var_name))
Example #23
def _run_initializer(initializer, shape):
    if _transpose_needed(initializer, shape):
        # Shape is given in Theano's filter order, which is
        # [#out-channel, #in-channel, height, width].
        # To compute fan-in and fan-out correctly in TensorFlow,
        # we reorder this to
        # [height, width, #in-channel, #out-channel].
        shape = [shape[2], shape[3], shape[1], shape[0]]

    variable = nn.make_variable(
        shape=shape, name='input', initializer=initializer)
    session = nn.Session()
    session.initialize()
    value = session.run(outputs=variable, name='test')

    if _transpose_needed(initializer, shape):
        # To make the output comparison easy, we revert the order.
        shape = [shape[3], shape[2], shape[0], shape[1]]
    return value
Example #24
def _create_variables(shape=(3, 4)):
    init = nn.initializer.ConstantInitializer
    src = nn.make_variable('source', shape=shape, initializer=init(value=1))
    tgt = nn.make_variable('target', shape=shape, initializer=init(value=0))
    return src, tgt
Example #25
def create_constant_variable(shape, dtype, value=7, name='constant_variable'):
    """Create Variable for test"""
    return nn.make_variable(
        name=name, shape=shape, dtype=dtype,
        initializer=nn.initializer.ConstantInitializer(value)
    )
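
The examples above share one pattern: create a variable inside nn.variable_scope with nn.make_variable and an initializer, then evaluate it through nn.Session. The following is a minimal sketch of that pattern, assembled only from calls shown in the examples; the scope name 'sketch' and the constant 2.0 are arbitrary placeholders, not part of any example above.

def run_make_variable_sketch():
    """Minimal sketch: create a constant-initialized variable and read it back."""
    with nn.variable_scope('sketch'):
        # make_variable registers the variable under the current scope
        w = nn.make_variable(
            name='w', shape=(),
            initializer=nn.initializer.ConstantInitializer(2.0))
    session = nn.Session()
    session.initialize()
    # Should return the initializer value, i.e. 2.0
    return session.run(outputs=w)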