Code Example #1
    def test_conv2dtranspose(self):
        """Compnents consisting Conv2DTranspose layer are retrieved"""
        scope = self.get_scope()
        with nn.variable_scope(scope) as vs:
            input_ = nn.Input(shape=(32, 4, 8, 8), name='input')
            layer = nn.get_layer('Conv2D')(filter_height=4,
                                           filter_width=4,
                                           n_filters=4,
                                           strides=1,
                                           with_bias=True,
                                           name='Conv2D')
            output = layer(input_)
            layer = nn.get_layer('Conv2DTranspose')(filter_height=4,
                                                    filter_width=4,
                                                    n_filters=4,
                                                    strides=1,
                                                    with_bias=True,
                                                    output_shape=input_.shape,
                                                    name='Conv2DT')
            output = layer(output)
            filters = layer.get_parameter_variable('filter')
            bias = layer.get_parameter_variable('bias')

        with nn.variable_scope(vs, reuse=True):
            self.assertIs(filters, nn.get_variable('Conv2DT/filter'))
            self.assertIs(bias, nn.get_variable('Conv2DT/bias'))
            self.assertIs(output, nn.get_tensor('Conv2DT/output'))
            self.assertIs(input_, nn.get_input('input'))
Code Example #2
    def test_concate_2d_axis_1_3(self):
        """Concatenate 3 2D tensors"""
        axis, shape1, shape2, shape3 = 1, (2, 5), (2, 3), (2, 4)
        with nn.variable_scope(self.get_scope(), reuse=False):
            var1 = nn.get_variable(name='var1', shape=shape1)
            var2 = nn.get_variable(name='var2', shape=shape2)
            var3 = nn.get_variable(name='var3', shape=shape3)
            conc_var = nn.layer.Concat(axis=axis).build([var1, var2, var3])

        session = nn.Session()
        val1, val2 = np.random.rand(*shape1), np.random.rand(*shape2)
        val3 = np.random.rand(*shape3)
        conc_val = session.run(outputs=conc_var,
                               givens={
                                   var1: val1,
                                   var2: val2,
                                   var3: val3
                               })
        expected = conc_val.shape
        found = conc_var.shape
        self.assertEqual(found, expected)

        expected = np.concatenate((val1, val2, val3), axis=axis)
        found = conc_val
        np.testing.assert_almost_equal(found, expected)
Code Example #3
    def test_get_grad(self):
        """get_grad correctly fetches gradient Tensor from Variable"""
        w_0 = 6
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        with nn.variable_scope(self.get_scope()):
            x = nn.Input(shape=(), name='x')
            w1 = nn.get_variable(
                name='w', shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y1 = w1 * x
            sgd.minimize(y1, w1)
            dy1dw1_1 = nn.get_tensor('{}_grad'.format(w1.name))
            dy1dw1_2 = nn.get_grad(w1)

            self.assertIs(dy1dw1_1, dy1dw1_2)

        with nn.variable_scope('{}/2'.format(self.get_scope())):
            w2 = nn.get_variable(
                name='w', shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y2 = w2 * x
            sgd.minimize(y2, w2)
            dy2dw2_1 = nn.get_tensor('{}_grad'.format(w2.name))
            dy2dw2_2 = nn.get_grad(w2)

            self.assertIs(dy2dw2_1, dy2dw2_2)
Code Example #4
def _create_variables(shape=(3, 4)):
    init = nn.initializer.ConstantInitializer
    with nn.variable_scope('source'):
        src = nn.get_variable('source', shape=shape, initializer=init(value=1))
    with nn.variable_scope('target'):
        tgt = nn.get_variable('target', shape=shape, initializer=init(value=0))
    return src, tgt
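
A minimal usage sketch for the helper above, assuming the same imports as the surrounding examples (numpy as np and luchador's nn); the Session checks are illustrative, not part of the original source:

src, tgt = _create_variables(shape=(3, 4))
session = nn.Session()
session.initialize()
# The pair is initialized with constants: 1 for 'source', 0 for 'target'.
src_val, tgt_val = session.run(outputs=[src, tgt])
np.testing.assert_almost_equal(src_val, np.ones((3, 4)))
np.testing.assert_almost_equal(tgt_val, np.zeros((3, 4)))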
Code Example #5
 def test_get_variable_reuse_variable(self):
     """get_variable create variable"""
     scope = self.get_scope()
     var1 = nn.get_variable(scope, shape=[3, 1])
     be._set_flag(True)
     var2 = nn.get_variable(scope)
     self.assertIs(
         var1.unwrap(), var2.unwrap(),
         'Reused variable should be identical to the original variable')
Code Example #6
File: scope_test.py Project: mot0/luchador
 def test_get_variable_raises_when_reuseing_non_existent_variable(self):
     """get_variable raise when trying to reuse non existent variable"""
     be._set_flag(True)
     try:
         nn.get_variable('non_existing_variable_name')
     except ValueError:
         pass
     else:
         self.fail('get_variable should raise when '
                   'trying to reuse a non-existent variable.')
Code Example #7
 def test_get_variable_raises_when_creating_already_existing_variable(self):
     """get_variable raise when trying to create existent variable"""
     scope = self.get_scope()
     nn.get_variable(scope, shape=[3, 1])
     try:
         nn.get_variable(scope)
     except ValueError:
         pass
     else:
         self.fail('get_variable should raise when '
                   'trying to create a variable that already exists.')
Code Example #8
File: scope_test.py Project: mthrok/luchador
 def test_get_variable_raises_when_reuseing_non_existent_variable(self):
     """get_variable raise when trying to reuse non existent variable"""
     be._set_flag(True)
     try:
         nn.get_variable('non_existing_variable_name')
     except ValueError:
         pass
     else:
         self.fail(
             'get_variable should raise when '
             'trying to reuse a non-existent variable.'
         )
Code Example #9
    def test_dense(self):
        """Compnents consisting Dense layer are retrieved"""
        with nn.variable_scope(self.get_scope()) as vs:
            input_ = nn.Input(shape=(32, 5), name='input')
            layer = nn.fetch_layer('Dense')(
                n_nodes=4, with_bias=True, scope='Dense')
            output = layer(input_)
            weight = layer.get_parameter_variable('weight')
            bias = layer.get_parameter_variable('bias')

        with nn.variable_scope(vs, reuse=True):
            self.assertIs(weight, nn.get_variable('Dense/weight'))
            self.assertIs(bias, nn.get_variable('Dense/bias'))
            self.assertIs(output, nn.get_tensor('Dense/output'))
            self.assertIs(input_, nn.get_input('input'))
Code Example #10
def create_constant_variable(shape, dtype, value=7, name='constant_variable'):
    """Create Variable for test"""
    return nn.get_variable(
        name=name,
        shape=shape,
        dtype=dtype,
        initializer=nn.initializer.ConstantInitializer(value))
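
A minimal usage sketch for this helper, following the Session pattern used in the other examples on this page (the shape and dtype here are arbitrary; value defaults to 7):

variable = create_constant_variable(shape=(3, 3), dtype='float32')
session = nn.Session()
session.initialize()
value = session.run(outputs=variable)
# Every element should equal the default constant, 7.
np.testing.assert_almost_equal(value, 7 * np.ones((3, 3)))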
Code Example #11
    def test_clip_gradients(self):
        """Gradients are clipped"""
        sgd = nn.optimizer.SGD(learning_rate=1.0)
        shape = (32, 1)
        with nn.variable_scope(self.get_scope()):
            initializer = nn.get_initializer('UniformInitializer')(minval=-3,
                                                                   maxval=3)
            x = nn.get_variable(name='x', shape=shape, initializer=initializer)
            y = x * x / 2
            grads_and_vars = [
                (nn.clip_by_value(grad, max_value=1.0, min_value=-1.0), var)
                for grad, var in sgd.compute_gradients(y.sum(), wrt=x)
            ]
            op = sgd.apply_gradients(grads_and_vars)

        session = nn.Session()
        session.initialize()

        val_0 = session.run(outputs=x)
        session.run(updates=op)
        val_1_be = session.run(outputs=x)

        val_1_np = np.zeros(shape)
        val_1_np[val_0 > 1] = val_0[val_0 > 1] - 1
        val_1_np[val_0 < -1] = val_0[val_0 < -1] + 1
        np.testing.assert_almost_equal(val_1_be, val_1_np)
Code Example #12
    def test_conv2d(self):
        """Compnents consisting Conv2D layer are retrieved"""
        scope = self.get_scope()
        with nn.variable_scope(scope) as vs:
            input_ = nn.Input(shape=(32, 4, 8, 8), name='input')
            layer = nn.fetch_layer('Conv2D')(
                filter_height=4, filter_width=4, n_filters=4,
                strides=1, with_bias=True, name='Conv2D')
            output = layer(input_)
            filters = layer.get_parameter_variable('filter')
            bias = layer.get_parameter_variable('bias')

        with nn.variable_scope(vs, reuse=True):
            self.assertIs(filters, nn.get_variable('Conv2D/filter'))
            self.assertIs(bias, nn.get_variable('Conv2D/bias'))
            self.assertIs(output, nn.get_tensor('Conv2D/output'))
            self.assertIs(input_, nn.get_input('input'))
Code Example #13
    def test_get_variable_creates_variable(self):
        """get_variable create variable"""
        scope, var_name = self.get_scope(), 'foo'
        full_name = '/'.join([scope, var_name])

        self.assertTrue(full_name not in nn.core.base.wrapper._VARIABLES)
        with nn.variable_scope(scope, reuse=True):
            with self.assertRaises(ValueError):
                nn.get_variable(var_name)

        with nn.variable_scope(scope, reuse=False):
            variable = nn.get_variable(var_name, shape=[3, 1])
        self.assertTrue(full_name in nn.core.base.wrapper._VARIABLES)

        self.assertIs(variable, nn.core.base.wrapper._VARIABLES[full_name])
        with nn.variable_scope(scope, reuse=True):
            self.assertIs(variable, nn.get_variable(var_name))
Code Example #14
File: scope_test.py Project: mthrok/luchador
    def test_get_variable_creates_variable(self):
        """get_variable create variable"""
        scope, var_name = self.get_scope(), 'foo'
        full_name = '/'.join([scope, var_name])

        self.assertTrue(full_name not in _VARIABLES)
        with nn.variable_scope(scope, reuse=True):
            with self.assertRaises(ValueError):
                nn.get_variable(var_name)

        with nn.variable_scope(scope, reuse=False):
            variable = nn.make_variable(var_name, shape=[3, 1])
        self.assertTrue(full_name in _VARIABLES)

        self.assertIs(variable, _VARIABLES[full_name])
        with nn.variable_scope(scope, reuse=True):
            self.assertIs(variable, nn.get_variable(var_name))
Code Example #15
 def get():
     x = nn.get_variable(name='x',
                         shape=[],
                         initializer=nn.initializer.ConstantInitializer(3))
     y = x * x
     return {
         'loss': y,
         'wrt': x,
     }
Code Example #16
def _get_y_equals_x_squared(scope, x_init):
    with nn.variable_scope(scope):
        x = nn.get_variable(
            name='x',
            shape=(),
            trainable=True,
            initializer=nn.initializer.ConstantInitializer(x_init))
        y = x * x
    return x, y
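
Because the helper returns both x and y = x * x, it composes directly with the SGD optimizer shown in the other examples; a minimal gradient-descent sketch (the scope name, learning rate, and step count are arbitrary illustrations, not from the original source):

x, y = _get_y_equals_x_squared('quadratic', x_init=3.0)
sgd = nn.optimizer.SGD(learning_rate=0.1)
update_op = sgd.minimize(y, x)
session = nn.Session()
session.initialize()
for _ in range(100):
    # Each step computes x <- x - 0.1 * dy/dx = x - 0.1 * 2x = 0.8 * x
    session.run(updates=update_op)
np.testing.assert_almost_equal(session.run(outputs=x), 0.0)  # 3.0 * 0.8**100 ~ 6e-10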
Code Example #17
File: scope_test.py Project: mthrok/luchador
 def test_get_variable_reuse_variable(self):
     """get_variable create variable"""
     scope = self.get_scope()
     var1 = nn.make_variable(scope, shape=[3, 1])
     be._set_flag(True)
     var2 = nn.get_variable(scope)
     self.assertIs(
         var1.unwrap(), var2.unwrap(),
         'Reused variable should be identical to the original variable'
     )
Code Example #18
 def get():
     x = nn.get_variable(
         name='x',
         shape=[],
         initializer=nn.initializer.ConstantInitializer(2.0))
     y = (x - 1.5) * (x - 1) * (x - 1) * (x + 1) * (x + 1) * (x + 1.5)
     return {
         'loss': y,
         'wrt': x,
     }
Code Example #19
File: activation_test.py Project: mot0/luchador
    def test_plrelu_parameter(self):
        """Parameter retrieval succeeds when train=True"""
        base_scope, scope, alpha, shape = self.get_scope(), 'foo', 0.1, (3, 4)
        with nn.variable_scope(base_scope):
            in_var = nn.Input(shape=shape)
            layer = nn.layer.LeakyReLU(alpha=alpha, train=True, scope=scope)
            layer(in_var)

        self.assertIs(layer.get_parameter_variable('alpha'),
                      nn.get_variable('{}/{}/alpha'.format(base_scope, scope)))
Code Example #20
def create_random_variable(shape,
                           dtype,
                           min_val=0,
                           max_val=1,
                           name='random_variable'):
    """Create Variable with uniform randoml values for test"""
    return nn.get_variable(name=name,
                           shape=shape,
                           dtype=dtype,
                           initializer=nn.initializer.UniformInitializer(
                               minval=min_val, maxval=max_val))
Code Example #21
File: activation_test.py Project: mthrok/luchador
    def test_plrelu_parameter(self):
        """Parameter retrieval succeeds when train=True"""
        base_scope, scope, alpha, shape = self.get_scope(), 'foo', 0.1, (3, 4)
        with nn.variable_scope(base_scope):
            in_var = nn.Input(shape=shape)
            layer = nn.layer.LeakyReLU(alpha=alpha, train=True, scope=scope)
            layer(in_var)

        self.assertIs(
            layer.get_parameter_variable('alpha'),
            nn.get_variable('{}/{}/alpha'.format(base_scope, scope))
        )
Code Example #22
    def test_bn(self):
        """Compnents consisting BatchNormalization layer are retrieved"""
        base_scope, scope = self.get_scope(), 'BN'
        with nn.variable_scope(base_scope) as vs:
            input_ = nn.Input(shape=(32, 4), name='input')
            layer = nn.fetch_layer('BatchNormalization')(scope=scope)
            output = layer(input_)
            mean = layer.get_parameter_variable('mean')
            var = layer.get_parameter_variable('var')
            scale = layer.get_parameter_variable('scale')
            offset = layer.get_parameter_variable('offset')
            updates = layer.get_update_operations()

        with nn.variable_scope(vs, reuse=True):
            self.assertIs(mean, nn.get_variable('BN/mean'))
            self.assertIs(var, nn.get_variable('BN/var'))
            self.assertIs(scale, nn.get_variable('BN/scale'))
            self.assertIs(offset, nn.get_variable('BN/offset'))
            self.assertIs(output, nn.get_tensor('BN/output'))
            self.assertIs(updates[0], nn.get_operation('BN/update_mean'))
            self.assertIs(updates[1], nn.get_operation('BN/update_var'))
Code Example #23
    def _test_load_dataset(self, dtype1, dtype2):
        name = 'test_load_dataset_{}_{}'.format(dtype1, dtype2)
        shape = (3, 3)
        target_value = 10

        variable = nn.get_variable(name=name, shape=shape, dtype=dtype1)
        value = target_value * np.ones(shape, dtype=dtype2)

        session = nn.Session()
        session.load_dataset({name: value}, cast=(dtype1 != dtype2))

        updated_value = session.run(outputs=variable)
        self.assertTrue(np.all(target_value == updated_value))
Code Example #24
    def test_check_optimizer_slot(self):
        """Slot variables are updated when applying gradient directly"""
        name, b1_0, b2_0 = 'Adam', 0.5, 0.4
        opt = nn.optimizer.Adam(
            learning_rate=1.0, name=name, beta1=b1_0, beta2=b2_0)
        with nn.variable_scope(self.get_scope()) as vs:
            x = nn.Input(shape=(), name='x')
            w = nn.get_variable(shape=(), name='w')
            update_op = opt.minimize(w * x, w)

            vs.reuse_variables()
            dw = nn.get_tensor('{}_grad'.format(w.name))
            b1 = nn.get_variable('{}/beta1_power'.format(name))
            b2 = nn.get_variable('{}/beta2_power'.format(name))

        session = nn.Session()
        session.initialize()

        for i in range(10):
            b1_val, b2_val = session.run(outputs=[b1, b2])
            np.testing.assert_almost_equal(b1_val, b1_0 ** (i + 1))
            np.testing.assert_almost_equal(b2_val, b2_0 ** (i + 1))
            session.run(updates=update_op, givens={dw: 1.0})
Code Example #25
 def test_compute_gradients_with_trainables(self):
     """compute_gradients computes gradients for trainable wrt"""
     sgd = nn.optimizer.SGD(learning_rate=0.01)
     with nn.variable_scope(self.get_scope()):
         xs = [
             nn.get_variable(
                 name='x_{}'.format(i),
                 shape=(),
                 trainable=True,
             ) for i in range(3)
         ]
         y = xs[0] + xs[1] + xs[2]
         grads_and_vars = sgd.compute_gradients(y, wrt=xs)
     self.assertEqual(len(xs), len(grads_and_vars))
     for i, (grad, var) in enumerate(grads_and_vars):
         self.assertIs(xs[i], var)
         self.assertIsNotNone(grad)
Code Example #26
def _run_initializer(initializer, shape):
    if _transpose_needed(initializer, shape):
        # Shape is given in Theano's filter order, which is
        # [#out-channel, #in-channel, height, width].
        # To compute fan-in and fan-out correctly in TensorFlow,
        # we reorder this to
        # [height, width, #in-channel, #out-channel].
        shape = [shape[2], shape[3], shape[1], shape[0]]

    variable = nn.get_variable(
        shape=shape, name='input', initializer=initializer)
    session = nn.Session()
    session.initialize()
    value = session.run(outputs=variable)

    if _transpose_needed(initializer, shape):
        # To make the output comparison easy, revert the axis order.
        value = value.transpose((3, 2, 0, 1))
    return value
Code Example #27
 def test_compute_gradients(self):
     """compute_gradients returns None for non-trainable wrt"""
     sgd = nn.optimizer.SGD(learning_rate=0.01)
     with nn.variable_scope(self.get_scope()):
         xs = [
             nn.get_variable(
                 name='x_{}'.format(i),
                 shape=(),
                 trainable=bool(i % 2),
             ) for i in range(5)
         ]
         y = xs[0] + xs[1] + xs[2] + xs[3] + xs[4]
         grads_and_vars = sgd.compute_gradients(y, wrt=xs)
     self.assertEqual(len(xs), len(grads_and_vars))
     for i, (grad, var) in enumerate(grads_and_vars):
         self.assertIs(xs[i], var)
         if i % 2:
             self.assertIsNotNone(grad)
         else:
             self.assertIsNone(grad)
Code Example #28
    def test_get_gradients(self):
        """gradients can be retrieved with get_tensor"""
        sgd = nn.optimizer.SGD(learning_rate=0.01)
        scope = self.get_scope()
        with nn.variable_scope(scope):
            xs = [
                nn.get_variable(
                    name='x_{}'.format(i),
                    shape=(),
                    trainable=True,
                ) for i in range(5)
            ]
            y = xs[0] + xs[1] + xs[2] + xs[3] + xs[4]
            grads_and_vars = sgd.compute_gradients(y, wrt=xs)

            for i in range(5):
                grad = nn.get_tensor('{}_grad'.format(xs[i].name))
                self.assertIs(grads_and_vars[i][0], grad)

        for i in range(5):
            grad = nn.get_tensor('{}/{}_grad'.format(scope, xs[i].name))
            self.assertIs(grads_and_vars[i][0], grad)
Code Example #29
    def test_apply_gradient_directly(self):
        """Variables can be updated by applying gradient directly"""
        w_0 = 6
        with nn.variable_scope(self.get_scope()):
            x = nn.Input(shape=(), name='x')
            w = nn.get_variable(
                name='w', shape=(),
                initializer=nn.initializer.ConstantInitializer(w_0),
            )
            y = w * x

            sgd = nn.optimizer.SGD(learning_rate=1.0)
            update_op = sgd.minimize(y, w)
            dw = nn.get_tensor('{}_grad'.format(w.name))

            session = nn.Session()
            session.initialize()

            val0 = 3.
            session.run(updates=update_op, givens={dw: val0})
            val_w = session.run(outputs=w)

            np.testing.assert_almost_equal(val_w, w_0 - val0)