Example #1
    def setup(self):

        from neon.backends.cc2 import GPU, GPUTensor

        # TODO: remove randomness from expected target results
        self.be = GPU(rng_seed=0)

        # reusable fake data: 2 input features x 100-sample batch
        self.inputs = GPUTensor(np.ones((2, 100)))

        # create fake layer
        nin = 2
        conf = {
            'name': 'testlayer',
            'num_nodes': 2,
            'weight_init': GaussianValGen(backend=self.be, loc=0.0, scale=0.01)
        }
        lr_params = {'learning_rate': 0.01}
        thislr = {'type': 'gradient_descent', 'lr_params': lr_params}
        activation = Logistic()
        self.layer = RBMLayer(name=conf['name'])
        # create fake cost
        self.cost = SumSquaredDiffs(olayer=self.layer)
        self.layer.initialize({
            'backend': self.be,
            'batch_size': 100,
            'lrule_init': thislr,
            'nin': nin,
            'nout': conf['num_nodes'],
            'activation': activation,
            'weight_init': conf['weight_init']
        })
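
Both snippets omit the module-level imports they depend on. A plausible preamble for the neon 0.x codebase these tests come from is sketched below; the exact module paths are assumptions and should be verified against the actual neon checkout.

    import numpy as np
    from nose.tools import nottest

    # NOTE: the module paths below are assumptions based on the neon 0.x
    # source layout; adjust them to match the installed version.
    from neon.layers.boltzmann import RBMLayer
    from neon.params.val_init import GaussianValGen
    from neon.transforms.logistic import Logistic
    from neon.transforms.sum_squared import SumSquaredDiffs
    from neon.util.testing import assert_tensor_near_equal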
Example #2
class TestCudaRBM:

    def setup(self):

        from neon.backends.cc2 import GPU, GPUTensor

        # TODO: remove randomness from expected target results
        self.be = GPU(rng_seed=0)

        # reusable fake data: 2 input features x 100-sample batch
        self.inputs = GPUTensor(np.ones((2, 100)))

        # create fake layer
        nin = 2
        conf = {'name': 'testlayer', 'num_nodes': 2,
                'weight_init': GaussianValGen(backend=self.be, loc=0.0,
                                              scale=0.01)}
        lr_params = {'learning_rate': 0.01}
        thislr = {'type': 'gradient_descent', 'lr_params': lr_params}
        activation = Logistic()
        self.layer = RBMLayer(name=conf['name'])
        # create fake cost
        self.cost = SumSquaredDiffs(olayer=self.layer)
        self.layer.initialize({'backend': self.be, 'batch_size': 100,
                               'lrule_init': thislr, 'nin': nin,
                               'nout': conf['num_nodes'],
                               'activation': activation,
                               'weight_init': conf['weight_init']})

    def test_cudanet_positive(self):
        # positive phase: hidden-unit probabilities given the data
        self.layer.positive(self.inputs)
        target = np.array([0.50541031, 0.50804842],
                          dtype='float32')
        assert_tensor_near_equal(self.layer.p_hid_plus.asnumpyarray()[:, 0],
                                 target)

    def test_cudanet_negative(self):
        # a negative (CD-1) phase follows a positive pass
        self.layer.positive(self.inputs)
        self.layer.negative(self.inputs)
        target = np.array([0.50274211, 0.50407821],
                          dtype='float32')
        assert_tensor_near_equal(self.layer.p_hid_minus.asnumpyarray()[:, 0],
                                 target)

    @nottest  # TODO: remove randomness
    def test_cudanet_cost(self):
        self.layer.positive(self.inputs)
        self.layer.negative(self.inputs)
        # sum-of-squared-differences cost between the data and its reconstruction
        thecost = self.cost.apply_function(self.inputs)
        target = 106.588943481
        assert_tensor_near_equal(thecost, target)
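
The quantities these tests check, p_hid_plus and p_hid_minus, are the hidden-unit probabilities before and after one contrastive-divergence (CD-1) step. The following is a minimal NumPy sketch of that computation, assuming a bias-free RBM with a logistic activation; it mirrors the structure the tests exercise, not neon's exact RBMLayer internals.

    import numpy as np

    def logistic(x):
        # elementwise sigmoid, matching the Logistic transform used above
        return 1.0 / (1.0 + np.exp(-x))

    rng = np.random.RandomState(0)
    nin, nout, batch_size = 2, 2, 100

    # weights with std dev 0.01, mirroring GaussianValGen(loc=0.0, scale=0.01)
    weights = rng.normal(loc=0.0, scale=0.01, size=(nout, nin))
    inputs = np.ones((nin, batch_size))

    # positive phase: hidden probabilities given the data
    p_hid_plus = logistic(weights.dot(inputs))

    # negative phase (CD-1): sample the hiddens, reconstruct the visibles,
    # then recompute the hidden probabilities from the reconstruction
    s_hid = (p_hid_plus > rng.uniform(size=p_hid_plus.shape)).astype(float)
    x_minus = logistic(weights.T.dot(s_hid))
    p_hid_minus = logistic(weights.dot(x_minus))

    # sum-of-squared-differences cost between data and reconstruction,
    # in the spirit of SumSquaredDiffs
    cost = 0.5 * np.sum((inputs - x_minus) ** 2)

Both phases are deterministic functions of the seeded RNG, so hard-coded targets like those in the tests are only valid for rng_seed=0; that fragility is what the TODO comments about removing randomness refer to.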