Example #1
 def test_gpusplit_opt(self):
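     # Test that the Split node is moved to the GPU by the optimizer.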
     rng = numpy.random.RandomState(seed=utt.fetch_seed())
     m = self.shared(rng.rand(4, 6).astype(self.floatX))
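     # Split m along axis 0 into two chunks of 2 rows each.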
     o = T.Split(2)(m, 0, [2, 2])
     f = theano.function([], o, mode=self.mode)
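     # The optimized graph should contain the expected Split op (e.g. GpuSplit).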
     assert any([isinstance(node.op, self.split_op_class)
                 for node in f.maker.fgraph.toposort()])
     o1, o2 = f()
     assert numpy.allclose(o1, m.get_value(borrow=True)[:2])
     assert numpy.allclose(o2, m.get_value(borrow=True)[2:])
Example #2
 def test_gpusplit_opt(self):
     # Test that we move the node to the GPU
     # Also test float16 computation at the same time.
     rng = np.random.RandomState(seed=utt.fetch_seed())
     m = self.shared(rng.rand(4, 6).astype('float16'))
     o = T.Split(2)(m, 0, [2, 2])
     assert o[0].dtype == 'float16'
     f = theano.function([], o, mode=self.mode)
     assert any([isinstance(node.op, self.split_op_class)
                 for node in f.maker.fgraph.toposort()])
     o1, o2 = f()
     assert np.allclose(o1, m.get_value(borrow=True)[:2])
     assert np.allclose(o2, m.get_value(borrow=True)[2:])