Example No. 1
def parabolic_crossentropy(nr_of_categories, delta=1, beta=1e10, dtype='float32'):
    argmax = DifferentiableArgmax(nr_of_categories, beta, dtype=dtype)
    offset = LazyInit(lambda: _K.constant(delta / 2, dtype=dtype))
    f = LazyInit(lambda: _K.constant((nr_of_categories + delta) / nr_of_categories, dtype=dtype))

    def loss_function(y_true, y_pred):
        # squared distance between the (offset) predicted class index and the scaled true class index
        return ((argmax(y_pred) + offset()) - (argmax(y_true)) * f()) ** 2

    return loss_function
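A factory like this can be handed straight to Keras when compiling a model. A hypothetical usage sketch (the tf.keras import, the model shape and nr_of_categories=11 are illustrative assumptions, not part of the example above):

from tensorflow import keras

nr_of_categories = 11
model = keras.Sequential([
    keras.layers.Dense(32, activation='relu', input_shape=(20,)),
    keras.layers.Dense(nr_of_categories, activation='softmax'),
])
model.compile(optimizer='adam', loss=parabolic_crossentropy(nr_of_categories))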
Example No. 2
def normal_penalized_crossentropy(nr_of_categories, alpha=10, beta=1e10, max_z=4, dtype='float32'):
    max_cat_int = nr_of_categories - 1
    _alpha = LazyInit(lambda: _K.constant(alpha, dtype=dtype))
    _mue = LazyInit(lambda: _K.constant(max_cat_int / 2, dtype=dtype))
    _sigma = LazyInit(lambda: _K.constant((max_cat_int - max_cat_int / 2) / abs(max_z), dtype=dtype))
    _argmax = DifferentiableArgmax(nr_of_categories, beta, dtype=dtype)
    _norm_dist = NormDistWeightedPenalizer(dtype)

    def loss_function(y_true, y_pred):
        # 1st we calculate the cross entropy
        loss = categorical_crossentropy(y_true, y_pred)

        # then we use the normal-shaped probability distribution to penalize the prediction
        penalty = _norm_dist(_argmax(y_pred), _mue(), _sigma()) * _alpha()

        return loss * penalty

    return loss_function
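For intuition, with illustrative values the penalizer's parameters work out as follows (this simply re-evaluates the expressions above with concrete numbers, it is not library code):

nr_of_categories, max_z = 11, 4
max_cat_int = nr_of_categories - 1        # 10
mue = max_cat_int / 2                     # 5.0, the center of the category range
sigma = (max_cat_int - mue) / abs(max_z)  # 1.25, so the edge categories sit max_z sigmas from the center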
Example No. 3
    def test_copy(self):
        l = LazyInit(lambda: 12)

        self.assertIsNone(l.value)
        self.assertEqual(12, l())
        self.assertIsNotNone(l.value)

        l2 = deepcopy(l)
        self.assertIsNone(l2.value)
        self.assertEqual(12, l2())
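The tests above and below pin down the contract of LazyInit: the wrapped supplier runs on the first call, the result is cached in value, and copies start out lazy again. A minimal sketch consistent with that behavior (an assumption for illustration, not the actual pandas-ml-common implementation):

from copy import deepcopy

class LazyInitSketch(object):

    def __init__(self, supplier):
        self.supplier = supplier  # zero-argument factory for the value
        self.value = None         # cache, filled on the first call

    def __call__(self):
        if self.value is None:
            self.value = self.supplier()
        return self.value

    def __getstate__(self):
        # drop the cached value so deep copies and unpickled objects start lazy again
        state = self.__dict__.copy()
        state['value'] = None
        return state

lazy = LazyInitSketch(lambda: 12)
assert lazy.value is None and lazy() == 12 and deepcopy(lazy).value is None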
Example No. 4
    def test_serialization(self):
        l = LazyInit(lambda: 12)

        self.assertIsNone(l.value)
        self.assertEqual(12, l())
        self.assertIsNotNone(l.value)

        l2 = deserializeb(serializeb(l))
        self.assertIsNone(l2.value)
        self.assertEqual(12, l2())
Example No. 5
    def test_serialization(self):
        """given"""
        lazy_val = LazyInit(lambda: K.constant(12))
        val = lazy_val()

        """when"""
        serialize(lazy_val, '/tmp/pandas_ml_common_test.dill')
        lazy_val_2 = deserialize('/tmp/pandas_ml_common_test.dill', LazyInit)

        """then"""
        self.assertEqual(12, K.eval(val))
        self.assertEqual(12, K.eval(lazy_val_2()))
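The suppliers here are lambdas, which the standard-library pickle cannot serialize; the .dill file name above suggests the serialize/deserialize helpers are dill-based. A minimal stand-in for the byte-level pair, assuming dill (illustrative only, not the library's code):

import dill

def serializeb_sketch(obj) -> bytes:
    return dill.dumps(obj)

def deserializeb_sketch(data: bytes):
    return dill.loads(data)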
Example No. 6
    def _record_loss(self, epoch, fold, fold_epoch, train_data: XYWeight, test_data: List[XYWeight], verbose, callbacks, loss_history_key=None):
        train_loss = self.calculate_loss(fold, train_data.x, train_data.y, train_data.weight)
        self._history["train", loss_history_key or fold][(epoch, fold_epoch)] = train_loss

        if len(test_data) > 0:
            test_loss = np.array([self.calculate_loss(fold, x, y, w) for x, y, w in test_data if len(x) > 0]).mean()
        else:
            test_loss = np.NaN
        self._history["test", loss_history_key or fold][(epoch, fold_epoch)] = test_loss

        self.after_fold_epoch(epoch, fold, fold_epoch, train_loss, test_loss)
        if verbose > 0:
            print(f"epoch: {epoch}, train loss: {train_loss}, test loss: {test_loss}")

        call_callable_dynamic_args(
            callbacks,
            epoch=epoch, fold=fold, fold_epoch=fold_epoch, loss=train_loss, val_loss=test_loss,
            y_train=train_data.y, y_test=[td.y for td in test_data],
            y_hat_train=LazyInit(lambda: self.predict(train_data.x)),
            # bind td as a default argument so each lazy prediction captures its own test fold
            y_hat_test=[LazyInit(lambda td=td: self.predict(td.x)) for td in test_data]
        )
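Because the predictions are wrapped in LazyInit, a callback only pays for them when it actually invokes the wrapper. A sketch of such a callback (the name and keyword selection are illustrative, assuming call_callable_dynamic_args forwards only the keyword arguments a callback declares):

def log_validation(epoch, loss, val_loss, y_hat_test):
    print(f"epoch {epoch}: train loss={loss:.4f}, test loss={val_loss:.4f}")
    if val_loss > 2 * loss:
        # only now are the (potentially expensive) predictions actually computed
        predictions = [y_hat() for y_hat in y_hat_test]
        print("test set predictions:", predictions)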
Example No. 7
    def __init__(self, dtype='float32'):
        self.one = LazyInit(lambda: _K.constant(1, dtype=dtype))
        self.two = LazyInit(lambda: _K.constant(2, dtype=dtype))
        self.pi = LazyInit(lambda: _K.constant(3.1415, dtype=dtype))  # low-precision approximation of pi
        self.e = LazyInit(lambda: _K.constant(2.7182, dtype=dtype))   # low-precision approximation of e
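The constants 1, 2, pi and e are exactly what a normal probability density needs, so a plausible __call__ for this penalizer could look like the sketch below (an assumption matching the three-argument call in Example No. 2; the library's actual formula may differ):

    def __call__(self, x, mue, sigma):
        # normal probability density evaluated at x, built from the lazy constants above
        return (self.one() / (sigma * _K.sqrt(self.two() * self.pi()))) \
            * self.e() ** (-((x - mue) ** 2) / (self.two() * sigma ** 2))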
Example No. 8
    def __init__(self, nr_of_categories, beta=1e10, dtype='float32'):
        self.y_range = LazyInit(lambda: _K.arange(0, nr_of_categories, dtype=dtype))
        self.beta = beta
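With a precomputed index range and a large beta, a differentiable argmax is typically built with the "soft argmax" trick: softmax(beta * y) approaches a one-hot vector, so its weighted sum with the index range approaches argmax(y) while remaining differentiable. A sketch of a matching __call__ (an assumption, not necessarily the library's exact code):

    def __call__(self, y):
        # weighted sum of category indices; for large beta this approximates argmax(y)
        return _K.sum(_K.softmax(y * self.beta) * self.y_range(), axis=-1)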