Example #1
    def score(self,
              actual: np.ndarray,
              predicted: np.ndarray,
              sample_weight: typing.Optional[np.ndarray] = None,
              labels: typing.Optional[np.ndarray] = None) -> float:

        if sample_weight is not None:
            sample_weight = sample_weight.ravel()
        enc_actual, enc_predicted, labels = prep_actual_predicted(
            actual, predicted, labels)
        cm_weights = sample_weight if sample_weight is not None else None

        # multiclass: treat each (row, class) pair as a binary problem by
        # flattening the per-class probabilities, one-hot encoding the actuals,
        # and repeating the sample weights once per class
        if enc_predicted.shape[1] > 1:
            enc_predicted = enc_predicted.ravel()
            enc_actual = label_binarize(enc_actual, labels).ravel()
            cm_weights = np.repeat(
                cm_weights,
                predicted.shape[1]).ravel() if cm_weights is not None else None
            assert enc_predicted.shape == enc_actual.shape
            assert cm_weights is None or enc_predicted.shape == cm_weights.shape

        cms = daicx.confusion_matrices(enc_actual.ravel(),
                                       enc_predicted.ravel(),
                                       sample_weight=cm_weights)
        cms = cms.loc[cms[[
            self.__class__._threshold_optimizer
        ]].idxmax()]  # get row(s) for optimal metric defined above
        cms['metric'] = cms[['tp', 'fp', 'tn',
                             'fn']].apply(lambda x: self.protected_metric(*x),
                                          axis=1,
                                          raw=True)
        return cms['metric'].mean()  # in case of ties
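
The row (or rows, in case of ties) with the best value of the _threshold_optimizer metric is then passed to protected_metric, which is defined elsewhere in the scorer class. A minimal sketch of what such a method could look like, assuming a plain F1 score computed from the four confusion-matrix counts (an illustration, not the recipe's actual definition):

    # Sketch only: an assumed F1-style metric built from the four counts;
    # not the recipe's actual protected_metric (tn is unused by F1).
    def protected_metric(self, tp: float, fp: float, tn: float, fn: float) -> float:
        precision = tp / (tp + fp) if (tp + fp) > 0 else 0.0
        recall = tp / (tp + fn) if (tp + fn) > 0 else 0.0
        if precision + recall == 0:
            return 0.0
        return 2 * precision * recall / (precision + recall)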

Example #2

    def score(self,
              actual: np.ndarray,
              predicted: np.ndarray,
              sample_weight: typing.Optional[np.ndarray] = None,
              labels: typing.Optional[np.ndarray] = None,
              X: typing.Optional[dt.Frame] = None,
              **kwargs) -> float:
        # can compute arbitrary cost from all original features;
        # X is required here because X_pd is used in protected_metric below
        assert X is not None
        assert X.nrows == len(actual)
        assert X.ncols >= 1
        X_pd = X.to_pandas()

        if sample_weight is not None:
            sample_weight = sample_weight.ravel()
        enc_actual, enc_predicted, labels = prep_actual_predicted(
            actual, predicted, labels)
        cm_weights = sample_weight if sample_weight is not None else None

        # multiclass: treat each (row, class) pair as a binary problem by
        # flattening the per-class probabilities, one-hot encoding the actuals,
        # and repeating the sample weights once per class
        if enc_predicted.shape[1] > 1:
            enc_predicted = enc_predicted.ravel()
            enc_actual = label_binarize(enc_actual, labels).ravel()
            cm_weights = np.repeat(
                cm_weights,
                predicted.shape[1]).ravel() if cm_weights is not None else None
            assert enc_predicted.shape == enc_actual.shape
            assert cm_weights is None or enc_predicted.shape == cm_weights.shape

        cms = daicx.confusion_matrices(enc_actual.ravel(),
                                       enc_predicted.ravel(),
                                       sample_weight=cm_weights)
        cms = cms.loc[cms[[
            self.__class__._threshold_optimizer
        ]].idxmax()]  # get row(s) for optimal metric defined above
        cms['metric'] = cms[['tp', 'fp', 'tn', 'fn']].apply(
            lambda x: self.protected_metric(*x, X_pd), axis=1, raw=True)
        return cms['metric'].mean()  # in case of ties
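
Because this variant also receives the original features as X, protected_metric can fold a data-dependent cost into the score. A minimal sketch, assuming a hypothetical per-row cost column named "cost" and illustrative false-positive/false-negative weights (none of these names or numbers come from the recipe):

    # Sketch only: a hypothetical cost-weighted metric; the "cost" column
    # and the FP/FN weights are assumptions for illustration.
    def protected_metric(self, tp, fp, tn, fn, X_pd):
        avg_cost = X_pd["cost"].mean() if "cost" in X_pd.columns else 1.0
        # penalize missed positives more heavily than false alarms
        return -(1.0 * fp + 5.0 * fn) * avg_cost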
Example #3
def test_metrics():
    actual = np.array([1, 2, 3, 4])
    predicted = np.array([1.1, 2, 3.5, 4])

    score = daicx.ll(actual, predicted)
    print("score")
    print(score)
    assert np.array_equal(score, np.array([ 9.9920072216264148e-16, -3.4539575992340879e+01, -6.9079151984681758e+01, -1.0361872797702264e+02])), "ll failed"

    score = daicx.log_loss(actual, predicted)
    print("score")
    print(score)
    assert score == -51.80936398851132, "logloss failed"

    score = daicx.se(actual, predicted)
    print("score")
    print(score)
    assert np.array_equal(score, np.array([0.010000000000000018, 0.,  0.25, 0.])), "se failed"

    score = daicx.mse(actual, predicted)
    print("score")
    print(score)
    assert score == 0.065, "mse failed"

    score = daicx.rmse(actual, predicted)
    print("score")
    print(score)
    assert score == 0.25495097567963926, "rmse failed"

    score = daicx.ce(actual, predicted)
    print("score")
    print(score)
    assert score == 0.5, "ce failed"

    score = daicx.ae(actual, predicted)
    print("score")
    print(score)
    assert np.array_equal(score, np.array([0.10000000000000009, 0., 0.5, 0.])), "ae failed"

    score = daicx.mae(actual, predicted)
    print("score")
    print(score)
    assert score == 0.15000000000000002, "mae failed"

    score = daicx.sle(actual, predicted)
    print("score")
    print(score)
    assert np.allclose(score, np.array([0.002380480119680131, 0., 0.013872843488432929, 0.])), "sle failed"

    score = daicx.msle(actual, predicted)
    print("score")
    print(score)
    assert score == 0.004063330902028265, "msle failed"

    score = daicx.rmsle(actual, predicted)
    print("score")
    print(score)
    assert score == 0.06374426171843443, "rmsle failed"

    score = daicx.auc(actual, predicted)
    print("score")
    print(score)
    assert score == 0.0, "auc failed"

    rank = daicx.tied_rank(actual)
    print("rank")
    print(rank)
    assert np.array_equal(rank, np.array([1.0, 2.0, 3.0, 4.0])), "rank failed"

    score = daicx.f05_opt(actual, predicted)
    print("score")
    print(score)
    assert score == 3.2142857142857144, "f05_opt failed"

    score = daicx.f1_opt(actual, predicted)
    print("score")
    print(score)
    assert score == 1.6363636363636365, "f1_opt failed"

    score = daicx.f2_opt(actual, predicted)
    print("score")
    print(score)
    assert score == 1.1363636363636365, "f2_opt failed"

    score = daicx.mcc_opt(actual, predicted)
    print("score")
    print(score)
    assert score == 0.0, "mcc_opt failed"

    score = daicx.acc_opt(actual, predicted)
    print("score")
    print(score)
    assert score == 2.75, "acc_opt failed"

    matrix = daicx.confusion_matrices(actual, predicted)
    print("matrix")
    print(matrix)
    # assert score == 1.0, "mcc_opt failed"
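
The hard-coded expected values for the squared, absolute and log-error metrics above can be reproduced with plain NumPy, which makes the assertions easier to audit. A quick sanity check (not part of the test itself):

import numpy as np

actual = np.array([1, 2, 3, 4])
predicted = np.array([1.1, 2, 3.5, 4])

se = (actual - predicted) ** 2                        # [0.01, 0., 0.25, 0.]
mse = se.mean()                                       # 0.065
rmse = np.sqrt(mse)                                   # 0.25495097567963926
ae = np.abs(actual - predicted)                       # [0.1, 0., 0.5, 0.]
mae = ae.mean()                                       # 0.15000000000000002
sle = (np.log1p(actual) - np.log1p(predicted)) ** 2   # per-element squared log error
msle = sle.mean()                                     # 0.004063330902028265
rmsle = np.sqrt(msle)                                 # 0.06374426171843443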