Example #1
def adaboost_classifier(model, inputs, method="predict_proba"):
    """
    Creates a SKAST expression corresponding to a given AdaBoost classifier.
    """
    divisor = model.estimator_weights_.sum()
    if method == 'decision_function':
        divisor /= (model.n_classes_ - 1)
    # SAMME.R always consumes per-tree class probabilities, as do the
    # probability methods; otherwise each tree contributes its class prediction.
    tree_method = ('predict_proba'
                   if model.algorithm == 'SAMME.R' or method.startswith('predict_')
                   else 'predict')
    tree_exprs = [
        decision_tree(e.tree_,
                      method=tree_method,
                      inputs=inputs,
                      value_transform=adaboost_values(model, w / divisor, method))
        for e, w in zip(model.estimators_, model.estimator_weights_)
    ]
    decision = sum_(tree_exprs)

    if method == 'decision_function':
        if model.n_classes_ == 2:
            decision = decision @ const([-1, 1])
        return decision
    elif method == 'predict':
        return func.ArgMax(decision)
    else:
        return classifier(
            vecsumnormalize(func.Exp(decision), model.n_classes_),
            method)
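For context, a minimal usage sketch, assuming SKompiler's documented top-level entry point skompile (which dispatches to converters such as the one above); the dataset and estimator size are arbitrary:

from sklearn.datasets import load_iris
from sklearn.ensemble import AdaBoostClassifier
from skompiler import skompile

X, y = load_iris(return_X_y=True)
model = AdaBoostClassifier(n_estimators=5).fit(X, y)

# Translate the fitted model's predict_proba into a SKAST expression
# and render it, e.g., as a SQLAlchemy/SQLite query.
expr = skompile(model.predict_proba)
print(expr.to('sqlalchemy/sqlite'))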
def gradient_boosting_classifier(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given gradient boosting classifier.

    At the moment we only support the model's decision_function method.
    FYI: conversion to probabilities and to a prediction depends on the loss; by default
         it is done as np.exp(score - logsumexp(score, axis=1)[:, np.newaxis]).
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    # One vector expression per boosting stage: each stage holds one
    # regression tree per class, shrunk by the learning rate.
    tree_exprs = [
        vector([
            decision_tree(estimator.tree_,
                          inputs,
                          method="predict",
                          value_transform=lambda v: v * model.learning_rate)
            for estimator in iteration
        ]) for iteration in model.estimators_
    ]
    # Here we rely on the fact that DummyClassifier.predict() does not actually read the input vectors.
    # Consequently, model.loss_.get_init_raw_predictions([<whatever>], model.<DummyClassifier>) kind of works.
    return sum_(
        tree_exprs +
        [const(model.loss_.get_init_raw_predictions([[]], model.init_)[0])])
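As a sanity check, a plain-NumPy sketch of the same accumulation, assuming a scikit-learn version that exposes model.loss_ and a 2-D model.estimators_ of per-class trees (as the converter above already requires); the helper name is illustrative:

import numpy as np

def gb_decision_reference(model, X):
    # Illustrative reference, not part of the library.
    # Baseline raw scores from the fitted init estimator, shape (n_samples, K).
    scores = model.loss_.get_init_raw_predictions(X, model.init_).astype(float)
    for stage in model.estimators_:       # one row of K trees per boosting stage
        for k, tree in enumerate(stage):  # one regression tree per class
            scores[:, k] += model.learning_rate * tree.predict(X)
    return scores  # matches model.decision_function(X), raveled when K == 1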
Example #3
def random_forest_classifier(model, inputs, method="predict_proba"):
    """
    Creates a SKAST expression corresponding to a given random forest classifier.
    """
    trees = [
        decision_tree(estimator.tree_,
                      inputs,
                      method="predict_proba",
                      value_transform=lambda v: v / len(model.estimators_))
        for estimator in model.estimators_
    ]
    return classifier(sum_(trees), method)
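The summed expression evaluates to the forest's averaged class probabilities. A NumPy equivalent for comparison (the helper name is illustrative):

import numpy as np

def rf_proba_reference(model, X):
    # Mean of the per-tree probability vectors -- what sum_(trees) builds above;
    # for a fitted RandomForestClassifier this matches model.predict_proba(X).
    return np.mean([est.predict_proba(X) for est in model.estimators_], axis=0)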
Example #4
def random_forest_regressor(model, inputs):
    """
    Creates a SKAST expression corresponding to a given random forest regressor.
    """

    return sum_([
        decision_tree(estimator.tree_,
                      inputs=inputs,
                      method="predict",
                      value_transform=lambda v: v / len(model.estimators_))
        for estimator in model.estimators_
    ])
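The regressor analogue is the plain average of the member trees' scalar outputs (again, an illustrative helper rather than library code):

import numpy as np

def rf_predict_reference(model, X):
    # Average of the individual trees' predictions; matches model.predict(X).
    return np.mean([est.predict(X) for est in model.estimators_], axis=0)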
def gradient_boosting_regressor(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given GB regressor.
    
    The logic is mostly the same as for the classifier, except we work with scalars rather than vectors.
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    # One scalar expression per boosting stage: each stage holds a single tree.
    tree_exprs = [
        decision_tree(iteration[0].tree_,
                      inputs,
                      method="predict",
                      value_transform=lambda v: v * model.learning_rate)
        for iteration in model.estimators_
    ]
    return sum_(tree_exprs + [const(model.init_.mean)])
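The same accumulation in NumPy, assuming the older scikit-learn API where model.init_ is a MeanEstimator exposing .mean, which the const(...) term above relies on (for least-squares loss this equals model.predict(X)); the helper name is illustrative:

import numpy as np

def gb_regressor_reference(model, X):
    # Illustrative sketch: baseline is the stored target mean; each stage
    # adds one regression tree, shrunk by the learning rate.
    return model.init_.mean + model.learning_rate * np.sum(
        [stage[0].predict(X) for stage in model.estimators_], axis=0)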
def gradient_boosting_classifier(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given gradient boosting classifier.

    At the moment we only support the model's decision_function method.
    FYI: conversion to probabilities and to a prediction depends on the loss; by default
         it is done as np.exp(score - logsumexp(score, axis=1)[:, np.newaxis]).
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    tree_exprs = [
        vector([
            decision_tree(estimator.tree_,
                          inputs,
                          method="predict",
                          value_transform=lambda v: v * model.learning_rate)
            for estimator in iteration
        ]) for iteration in model.estimators_
    ]
    return sum_(tree_exprs + [const(model.init_.priors)])
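This variant targets the older scikit-learn API, where model.init_ was a PriorProbabilityEstimator whose .priors vector serves as the baseline raw score (an assumption inferred from the attribute used above). A vectorized NumPy sketch with an illustrative helper name:

import numpy as np

def gb_classifier_reference_old_api(model, X):
    # Illustrative sketch under the old-API assumption above.
    # stage_preds has shape (n_stages, K, n_samples).
    stage_preds = np.array([[tree.predict(X) for tree in stage]
                            for stage in model.estimators_])
    # Class priors as baseline, plus the learning-rate-scaled sum over stages.
    return model.init_.priors + model.learning_rate * stage_preds.sum(axis=0).T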