Example #1
0
 def get_loss(self, y_pred, y_true, X=None, training=False):
     """Compute a combined L1 + L2 loss against column 1 of the predictions.

     The target is converted with ``to_var`` (CUDA disabled) and cast to
     float.  During training, both partial losses are additionally logged
     to the batch history under 'loss_a' (L1) and 'loss_b' (L2).

     Returns the sum of the mean absolute error and the mean squared error.
     """
     target = to_var(y_true, use_cuda=False).float()
     residual = target - y_pred[:, 1]
     l1_loss = torch.abs(residual).mean()
     l2_loss = (residual ** 2).mean()
     if training:
         # Record each component separately so they can be inspected
         # per batch in the history.
         self.history.record_batch('loss_a', to_numpy(l1_loss)[0])
         self.history.record_batch('loss_b', to_numpy(l2_loss)[0])
     return l1_loss + l2_loss
Example #2
0
    def test_dropout(self, net_fit, data):
        """Two eval-mode passes agree; two training-mode passes differ.

        Note: does not test that dropout is really active during
        training.
        """
        X = data[0]

        # By default (no training flag) dropout is inactive, so repeated
        # forward passes must produce identical outputs.
        first_pass = to_numpy(net_fit.forward(X))
        second_pass = to_numpy(net_fit.forward(X))
        assert np.allclose(first_pass, second_pass, rtol=1e-7)

        # With training=True dropout is switched on, so two passes
        # should give different outputs.
        noisy_a = to_numpy(net_fit.forward(X, training=True))
        noisy_b = to_numpy(net_fit.forward(X, training=True))
        assert not np.allclose(noisy_a, noisy_b, rtol=1e-7)