Example #1
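The snippets below are test methods lifted out of a larger test class, so their imports are not shown. A plausible preamble, assuming the standalone Keras test suite (exact module paths vary between Keras and tf.keras versions), would be:

 import numpy as np
 from keras import backend as K
 from keras import losses
 from keras.utils import losses_utils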
 def test_timestep_weighted(self):
     msle_obj = losses.MeanSquaredLogarithmicError()
     y_true = K.constant([1, 9, 2, -5, -2, 6], shape=(2, 3, 1))
     y_pred = K.constant([4, 8, 12, 8, 1, 3], shape=(2, 3, 1))
     sample_weight = K.constant([3, 6, 5, 0, 4, 2], shape=(2, 3))
     loss = msle_obj(y_true, y_pred, sample_weight=sample_weight)
     assert np.allclose(K.eval(loss), 2.6473, atol=1e-3)
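For reference, the expected value of about 2.6473 can be reproduced by hand. The sketch below assumes the usual Keras MSLE definition (values clipped to a small epsilon before the log, so the negative targets contribute log(1) = 0) and the default SUM_OVER_BATCH_SIZE reduction, which divides the weighted per-timestep losses by the total number of timesteps; msle_elements is an illustrative helper, not part of the test suite. Since the last axis of the (2, 3, 1) inputs has size 1, the per-timestep losses reduce to the element-wise squared log differences.

 import numpy as np

 def msle_elements(y_true, y_pred, eps=1e-7):
     # element-wise squared difference of log(1 + clipped value)
     first = np.log(np.maximum(y_pred, eps) + 1.0)
     second = np.log(np.maximum(y_true, eps) + 1.0)
     return np.square(first - second)

 y_true = np.array([1, 9, 2, -5, -2, 6], dtype=float).reshape(2, 3)
 y_pred = np.array([4, 8, 12, 8, 1, 3], dtype=float).reshape(2, 3)
 weights = np.array([3, 6, 5, 0, 4, 2], dtype=float).reshape(2, 3)

 per_timestep = msle_elements(y_true, y_pred)            # shape (2, 3)
 loss = np.sum(per_timestep * weights) / per_timestep.size
 print(round(loss, 4))                                   # ~2.647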
Example #2
 def test_sample_weighted(self):
     msle_obj = losses.MeanSquaredLogarithmicError()
     y_true = K.constant([1, 9, 2, -5, -2, 6], shape=(2, 3))
     y_pred = K.constant([4, 8, 12, 8, 1, 3], shape=(2, 3))
     sample_weight = K.constant([1.2, 3.4], shape=(2, 1))
     loss = msle_obj(y_true, y_pred, sample_weight=sample_weight)
     assert np.allclose(K.eval(loss), 3.7856, atol=1e-3)
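Here each weight applies to a whole sample (row) rather than to individual timesteps. Under the same assumptions as the sketch above, the per-sample losses are the row means of the squared log differences; scaling them by the sample weights and dividing by the batch size reproduces roughly 3.7856:

 import numpy as np

 y_true = np.array([1, 9, 2, -5, -2, 6], dtype=float).reshape(2, 3)
 y_pred = np.array([4, 8, 12, 8, 1, 3], dtype=float).reshape(2, 3)
 eps = 1e-7

 sq_log_diff = np.square(np.log(np.maximum(y_pred, eps) + 1.0)
                         - np.log(np.maximum(y_true, eps) + 1.0))
 per_sample = sq_log_diff.mean(axis=-1)                  # ~[1.000, 1.874]
 loss = np.sum(per_sample * np.array([1.2, 3.4])) / 2    # divide by batch size
 print(round(loss, 4))                                   # ~3.786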
Example #3
 def test_unweighted(self):
     msle_obj = losses.MeanSquaredLogarithmicError()
     y_true = K.constant([1, 9, 2, -5, -2, 6], shape=(2, 3))
     y_pred = K.constant([4, 8, 12, 8, 1, 3], shape=(2, 3))
     loss = msle_obj(y_true, y_pred)
     assert np.allclose(K.eval(loss), 1.4370, atol=1e-3)
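Without sample weights the default reduction simply averages the per-sample losses over the batch, which is where 1.4370 comes from under the same assumed MSLE definition:

 import numpy as np

 y_true = np.array([1, 9, 2, -5, -2, 6], dtype=float).reshape(2, 3)
 y_pred = np.array([4, 8, 12, 8, 1, 3], dtype=float).reshape(2, 3)
 eps = 1e-7

 sq_log_diff = np.square(np.log(np.maximum(y_pred, eps) + 1.0)
                         - np.log(np.maximum(y_true, eps) + 1.0))
 per_sample = sq_log_diff.mean(axis=-1)   # one MSLE value per sample
 print(round(per_sample.mean(), 4))       # ~1.437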
Example #4
 def test_config(self):
     msle_obj = losses.MeanSquaredLogarithmicError(
         reduction=losses_utils.Reduction.SUM, name='mape_1')
     assert msle_obj.name == 'mape_1'
     assert msle_obj.reduction == losses_utils.Reduction.SUM
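The name 'mape_1' is just an arbitrary label (apparently carried over from a similar MAPE test); any string would satisfy the assertion. As for the reduction, Reduction.SUM is generally understood to add the per-sample losses instead of averaging them, so, assuming that semantics and reusing the two per-sample values worked out above, the unweighted inputs from Example #3 would give roughly 2.874 rather than 1.437:

 # Hypothetical illustration of Reduction.SUM vs. the default mean reduction,
 # using the per-sample MSLE values computed in the earlier sketches.
 per_sample = [1.0002, 1.8738]
 print(round(sum(per_sample), 4))   # ~2.874 with SUM; the default mean gives ~1.437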