    def __call__(self, x0, x1, label):
        # Embed both inputs with the same shared-weight network.
        y0 = self.forward_once(x0)
        y1 = self.forward_once(x1)
        # Wrap the 0/1 pair labels so they can be fed to the loss function.
        label = chainer.Variable(label)
        loss = contrastive(y0, y1, label)
        # Expose the loss to Chainer's reporter (e.g. for LogReport).
        reporter.report({'loss': loss}, self)
        return loss
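For reference, `contrastive` here is Chainer's contrastive loss between pairs of embeddings. A minimal NumPy sketch of the value it is expected to compute, matching the hand-computed check in check_forward below and assuming the default margin of 1 (the function name is illustrative only):

import numpy as np

def contrastive_reference(y0, y1, label, margin=1.0):
    # Squared Euclidean distance between each embedding pair.
    d = np.sum((y0 - y1) ** 2, axis=1)
    # Similar pairs (label == 1) are penalized by their squared distance;
    # dissimilar pairs (label == 0) only if they fall inside the margin.
    push = np.maximum(margin - np.sqrt(d), 0) ** 2
    return np.mean(label * d + (1 - label) * push) / 2.0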
Example #2
    def forward(self, x0, x1, label, train=True):
        # Toggle Chainer's global train flag (v2-style configuration).
        chainer.config.train = train

        # Run both inputs through the shared embedding network.
        y0 = self.forward_once(x0, train)
        y1 = self.forward_once(x1, train)
        label = Variable(label)

        return contrastive(y0, y1, label)
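A hypothetical call site for a forward like the one above; SiameseNet, the batch size, and the input shape are made up for illustration. Note that the embeddings should be float32 and the pair labels int32 values of 0 or 1:

import numpy as np

# Hypothetical data: two batches of inputs plus 0/1 pair labels.
x0 = np.random.rand(8, 1, 28, 28).astype(np.float32)
x1 = np.random.rand(8, 1, 28, 28).astype(np.float32)
label = np.random.randint(0, 2, size=8).astype(np.int32)

model = SiameseNet()  # hypothetical model class defining forward/forward_once
loss = model.forward(x0, x1, label, train=True)
loss.backward()       # gradients flow into both branches of the network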
    def check_backward(self, x0_data, x1_data, t_data, use_cudnn=True):
        x0 = chainer.Variable(x0_data)
        x1 = chainer.Variable(x1_data)
        t = chainer.Variable(t_data)
        loss = contrastive(x0, x1, t, use_cudnn)
        loss.backward()
        # The integer labels are not differentiable, so they get no gradient.
        self.assertEqual(None, t.grad)

        # Compare the analytic gradients against numerical differentiation.
        func = loss.creator
        f = lambda: func.forward((x0.data, x1.data, t.data))
        gx0, = gradient_check.numerical_grad(f, (x0.data,), (1,))
        gx1, = gradient_check.numerical_grad(f, (x1.data,), (1,))

        gradient_check.assert_allclose(gx0, x0.grad)
        gradient_check.assert_allclose(gx1, x1.grad)
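numerical_grad approximates each gradient entry with central finite differences. A simplified sketch of the idea, not Chainer's actual implementation, assuming f() returns a scalar loss and grad_out is the upstream gradient (1 in the test above):

import numpy as np

def numerical_grad_sketch(f, x, grad_out, eps=1e-3):
    # Perturb each element of x in place and difference the outputs.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'])
    while not it.finished:
        idx = it.multi_index
        orig = x[idx]
        x[idx] = orig + eps
        y_plus = f()
        x[idx] = orig - eps
        y_minus = f()
        x[idx] = orig
        grad[idx] = (y_plus - y_minus) * grad_out / (2 * eps)
        it.iternext()
    return grad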
    def check_forward(self, x0_data, x1_data, t_data, use_cudnn=True):
        x0_val = chainer.Variable(x0_data)
        x1_val = chainer.Variable(x1_data)
        t_val = chainer.Variable(t_data)
        loss = contrastive(x0_val, x1_val, t_val, self.margin, use_cudnn)
        # The loss must come out as a float32 scalar.
        self.assertEqual(loss.data.shape, ())
        self.assertEqual(loss.data.dtype, numpy.float32)
        loss_value = float(cuda.to_cpu(loss.data))

        # Compute expected value by hand, pair by pair.
        loss_expect = 0
        for i in six.moves.range(self.x0.shape[0]):
            x0d, x1d, td = self.x0[i], self.x1[i], self.t[i]
            d = numpy.sum((x0d - x1d) ** 2)
            if td == 1:  # similar pair: penalize squared distance
                loss_expect += d
            elif td == 0:  # dissimilar pair: penalize being inside the margin
                loss_expect += max(self.margin - math.sqrt(d), 0) ** 2
        loss_expect /= 2.0 * self.t.shape[0]
        self.assertAlmostEqual(loss_expect, loss_value, places=5)
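A sketch of how such a check might be driven from a unittest.TestCase, with check_forward as defined above; the shapes, the margin value, and the class name are assumptions chosen to match the conventions visible in the snippet:

import math
import unittest

import numpy
import six

import chainer
from chainer import cuda, gradient_check

class TestContrastive(unittest.TestCase):

    def setUp(self):
        # Random embeddings and 0/1 pair labels (shapes are illustrative).
        self.x0 = numpy.random.uniform(-1, 1, (5, 10)).astype(numpy.float32)
        self.x1 = numpy.random.uniform(-1, 1, (5, 10)).astype(numpy.float32)
        self.t = numpy.random.randint(0, 2, (5,)).astype(numpy.int32)
        self.margin = 1.0

    # check_forward / check_backward from the snippets above would live here.

    def test_forward_cpu(self):
        self.check_forward(self.x0, self.x1, self.t)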
Example #8
    def forward(self, x0, x1, label, train=True):
        y0 = self.forward_once(x0, train)
        y1 = self.forward_once(x1, train)
        # volatile is the pre-v2 Chainer API: it marks the labels as
        # inference-only, skipping graph construction when train is False.
        label = Variable(label, volatile=not train)

        return contrastive(y0, y1, label)
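The volatile flag was removed in Chainer v2; configuration contexts replace it. A rough v2-style equivalent of the method above, assuming forward_once reads the train flag from chainer.config instead of taking it as an argument:

import chainer

def forward(self, x0, x1, label, train=True):
    # using_config replaces the old volatile/train flags in Chainer v2+.
    with chainer.using_config('train', train), \
         chainer.using_config('enable_backprop', train):
        y0 = self.forward_once(x0)
        y1 = self.forward_once(x1)
        label = chainer.Variable(label)
        return contrastive(y0, y1, label)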