Example #1
    def test_basic(self):
        utt.verify_grad(xlogy0, [numpy.random.rand(3, 4), numpy.random.rand(3, 4)])

        x = as_tensor_variable([1, 0])
        y = as_tensor_variable([1, 0])
        z = xlogy0(x, y)
        f = theano.function([], z)
        assert numpy.all(f() == numpy.asarray([0, 0.0]))
Example #2
def binary_cross_entropy(target, output, sum_axis=1):
    XE = xlogy0(target, output) + xlogy0((1 - target), (1 - output))
    return -T.sum(XE, axis=sum_axis)
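
For context, a minimal, hypothetical sketch of how the binary_cross_entropy helper above might be evaluated. It is not taken from the listed sources; it assumes xlogy0 is importable from theano.tensor.xlogx and that target/output are 2-D float matrices.

import numpy
import theano
import theano.tensor as T
from theano.tensor.xlogx import xlogy0

# Symbolic inputs; the shapes and dtypes are assumptions for this sketch.
target = T.matrix('target')
output = T.matrix('output')

# Same expression as binary_cross_entropy above: xlogy0 returns 0 wherever
# its first argument is 0, so a prediction of exactly 0 or 1 does not
# produce NaN for the matching target entry.
xe = -T.sum(xlogy0(target, output) + xlogy0(1 - target, 1 - output), axis=1)
f = theano.function([target, output], xe)

print(f(numpy.array([[1.0, 0.0]]), numpy.array([[0.9, 0.1]])))  # small positive loss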
Example #3
    def test3(self):
        x = as_tensor_variable([1, 0])
        y = as_tensor_variable([1, 0])
        z = xlogy0(x, y)
        f = theano.function([], z)
        self.assertTrue(numpy.all(f() == numpy.asarray([0, 0.])))