def mlp(model, inputs):
    actns = [model.activation] * (len(model.coefs_) - 1) + [
        model.out_activation_
    ]
    outs = inputs
    for M, b, a in zip(model.coefs_, model.intercepts_, actns):
        outs = _activations[a](const(M.T) @ outs + const(b))
    return outs
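For a quick sanity check of the layer arithmetic: scikit-learn stores coefs_ as (n_in, n_out) matrices and computes x @ W + b per layer, while the expression above uses M.T @ outs because SKAST works with column vectors. A minimal NumPy sketch of the same forward pass (assumed setup, not skompiler API):

import numpy as np
from sklearn.neural_network import MLPRegressor

model = MLPRegressor(hidden_layer_sizes=(8,), activation='relu',
                     max_iter=2000, random_state=0)
model.fit(np.random.rand(50, 3), np.random.rand(50))

def forward(model, x):
    for i, (W, b) in enumerate(zip(model.coefs_, model.intercepts_)):
        x = x @ W + b
        if i < len(model.coefs_) - 1:   # hidden layers; the output is 'identity'
            x = np.maximum(x, 0)        # 'relu'
    return x

x = np.random.rand(3)
assert np.allclose(forward(model, x), model.predict(x[None])[0])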
Example #2
    def TensorSharedVariable(self, node):
        if node.name is None:
            return const(node.container.value)
        refname = node.name.replace('/', '_')
        if refname not in self.definitions:
            res = const(node.container.value)
            if isinstance(res, ast.MatrixConstant):
                # Keras always multiplies matrices on the right, we prefer the other way
                res = const(res.value.T)
            self.definitions[refname] = res
        return ref(refname, self.definitions[refname])
Example #3
def normalizer(norm, inputs):
    if norm == 'l2':
        norm = func.Sqrt(func.VecSum(inputs * inputs))
    elif norm == 'l1':
        norm = func.VecSum(func.Abs(inputs))
    elif norm == 'max':
        norm = func.VecMax(inputs)
    else:
        raise ValueError("Unknown norm {0}".format(norm))
    norm_fix = iif(ref('norm', norm) == const(0), const(1), ref('norm', norm))
    return let(defn(norm=norm), defn(norm_fix=norm_fix),
               inputs / vector([ref('norm_fix', norm_fix)] * len(inputs)))
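The let/iif construction guards against division by zero: a norm of exactly 0 is replaced by 1, so an all-zero input passes through unchanged. A plain-NumPy sketch of the same 'l2' logic:

import numpy as np

def l2_normalize(x):
    # Mirror of the SKAST expression above: a zero norm is replaced by 1,
    # so an all-zero vector is returned unchanged.
    norm = np.sqrt(np.sum(x * x))
    return x / (norm if norm != 0 else 1.0)

print(l2_normalize(np.array([3.0, 4.0])))   # [0.6 0.8]
print(l2_normalize(np.zeros(2)))            # [0. 0.]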
Example #4
def adaboost_classifier(model, inputs, method="predict_proba"):
    """
    Creates a SKAST expression corresponding to a given AdaBoost classifier.
    """
    divisor = model.estimator_weights_.sum()
    if method == 'decision_function':
        divisor /= (model.n_classes_ - 1)
    tree_exprs = [
        decision_tree(e.tree_,
                      method='predict_proba'
                      if model.algorithm == 'SAMME.R' else 'predict',
                      inputs=inputs,
                      value_transform=adaboost_values(model, w / divisor,
                                                      method))
        for e, w in zip(model.estimators_, model.estimator_weights_)
    ]
    decision = sum_(tree_exprs)

    if method == 'decision_function':
        if model.n_classes_ == 2:
            decision = decision @ const([-1, 1])
        return decision
    elif method == 'predict':
        return func.ArgMax(decision)
    else:
        return classifier(sklearn_softmax(decision, model.n_classes_), method)
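For intuition, the SAMME 'predict' path reduces to a weighted vote over the per-tree class predictions. A rough NumPy sketch (hypothetical helper, assuming algorithm='SAMME'):

import numpy as np
from sklearn.datasets import load_iris
from sklearn.ensemble import AdaBoostClassifier

X, y = load_iris(return_X_y=True)
model = AdaBoostClassifier(algorithm='SAMME', n_estimators=10,
                           random_state=0).fit(X, y)

def samme_predict(model, x):
    # Weighted vote: each tree adds its weight to the class it predicts.
    votes = {c: 0.0 for c in model.classes_}
    for est, w in zip(model.estimators_, model.estimator_weights_):
        votes[est.predict(x[None])[0]] += w
    return max(votes, key=votes.get)

assert samme_predict(model, X[0]) == model.predict(X[0][None])[0]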
Example #5
def gradient_boosting_classifier(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given gradient boosting classifier.

    At the moment we only support the model's decision_function method.
    FYI: Conversion to probabilities and a prediction depends on the loss; by default
         it is done as np.exp(score - logsumexp(score, axis=1)[:, np.newaxis]).
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    tree_exprs = [
        vector([
            decision_tree(estimator.tree_,
                          inputs,
                          method="predict",
                          value_transform=lambda v: v * model.learning_rate)
            for estimator in iteration
        ]) for iteration in model.estimators_
    ]
    # Here we rely on the fact that DummyClassifier.predict() does not really read the input vectors.
    # Consequently, model.loss_.get_init_raw_predictions([<whatever>], model.<DummyClassifier>) kind-of-works.
    return sum_(
        tree_exprs +
        [const(model.loss_.get_init_raw_predictions([[]], model.init_)[0])])
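As the docstring notes, converting decision scores to probabilities is loss-dependent; for the default deviance it is a softmax evaluated stably through logsumexp. A small sketch of that conversion:

import numpy as np
from scipy.special import logsumexp

def scores_to_proba(score):
    # Numerically stable softmax, as mentioned in the docstring above:
    # exp(score - logsumexp(score)) row by row.
    return np.exp(score - logsumexp(score, axis=1)[:, np.newaxis])

score = np.array([[1.0, 2.0, 0.5]])
print(scores_to_proba(score))   # each row sums to 1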
Example #6
def k_means(cluster_centers, inputs, method):
    res = []
    for c in cluster_centers:
        dx = inputs - const(c)
        res.append(let(defn(dx=dx), ref('dx', dx) @ ref('dx', dx)))
    
    sq_dists = vector(res)
    if method == 'transform':
        return func.Sqrt(sq_dists)
    elif method == 'predict':
        return func.ArgMax(sq_dists * -1)
    else:
        raise ValueError("Unsupported methods for KMeans: {0}".format(method))
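Since ArgMax(sq_dists * -1) is simply an argmin over squared distances, predict picks the nearest centre; the let binding merely avoids recomputing dx. An equivalent NumPy sketch:

import numpy as np

def kmeans_predict(cluster_centers, x):
    # Squared Euclidean distance to every centre; the nearest one wins.
    sq_dists = np.array([np.dot(x - c, x - c) for c in cluster_centers])
    return int(np.argmin(sq_dists))          # == argmax(-sq_dists)

centers = np.array([[0.0, 0.0], [5.0, 5.0]])
print(kmeans_predict(centers, np.array([4.0, 4.5])))  # 1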
Example #7
    def walk(self, node_id=0):
        if node_id >= self.tree.node_count or node_id < 0:
            raise ValueError("Invalid node id")
        if self.tree.children_left[node_id] == -1:
            if self.tree.children_right[node_id] != -1:
                raise ValueError(
                    "Invalid tree structure. Children must either be both present or absent"
                )

            if self.values.ndim == 1:
                return const(self.values[node_id].item())
            else:
                return const(self.values[node_id])
        else:
            ft = self.tree.feature[node_id]
            if ft < 0 or ft >= self.tree.n_features:
                raise ValueError(
                    "Invalid feature value for node {0}".format(node_id))
            return iif(
                self.features[ft] <= const(
                    self.tree.threshold[node_id].item()),
                self.walk(self.tree.children_left[node_id]),
                self.walk(self.tree.children_right[node_id]))
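The method traverses scikit-learn's flat tree arrays: children_left[i] == -1 marks a leaf, otherwise feature[i] and threshold[i] define the split. A standalone evaluator over the same arrays, for comparison (assumed setup):

import numpy as np
from sklearn.tree import DecisionTreeRegressor

X = np.random.rand(100, 3)
y = X[:, 0] + X[:, 1]
model = DecisionTreeRegressor(max_depth=3, random_state=0).fit(X, y)
tree = model.tree_

def evaluate(tree, x, node_id=0):
    # Same traversal as walk() above, but computing a number instead of
    # building an expression: children_left == -1 marks a leaf.
    if tree.children_left[node_id] == -1:
        return tree.value[node_id].item()
    if x[tree.feature[node_id]] <= tree.threshold[node_id]:
        return evaluate(tree, x, tree.children_left[node_id])
    return evaluate(tree, x, tree.children_right[node_id])

x = np.random.rand(3)
assert np.isclose(evaluate(tree, x), model.predict(x[None])[0])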
Example #8
def linear_model(coef, intercept, inputs):
    """
    Linear regression.
    Depending on the shape of the coef and intercept, produces either a single-valued
    linear model (w @ x + b) or a multi-valued one (M @ x + b_vec)

    Args:

        coef (np.array): A vector (1D array, for single-valued model) or a matrix (2D array, for multi-valued one) for the model.
        intercept:  a number (for single-valued) or a 1D array (for multi-valued regression).
        inputs:  a list of AST nodes to be used as the input vector to the model or a single node, corresponding to a vector.
    """

    single_valued = (coef.ndim == 1)
    if single_valued and hasattr(intercept, '__iter__'):
        raise ValueError(
            "Single-valued linear model must have a single value for the intercept"
        )
    elif not single_valued and (coef.ndim != 2 or intercept.ndim != 1):
        raise ValueError(
            "Multi-valued linear model must have a 2D coefficient matrix and a 1D intercept vector"
        )

    return const(coef) @ inputs + const(intercept)
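A quick illustration of the two accepted shapes:

import numpy as np

# Single-valued: w @ x + b, a scalar output.
w, b = np.array([1.0, 2.0]), 0.5
# Multi-valued: M @ x + b_vec, one output per row of M.
M, b_vec = np.array([[1.0, 0.0], [0.0, 1.0]]), np.array([0.5, -0.5])

x = np.array([3.0, 4.0])
print(w @ x + b)        # 11.5
print(M @ x + b_vec)    # [3.5 3.5]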
Example #9
def gradient_boosting_regressor(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given GB regressor.
    
    The logic is mostly the same as for the classifier, except we work with scalars rather than vectors.
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    tree_exprs = [
        decision_tree(iteration[0].tree_,
                      inputs,
                      method="predict",
                      value_transform=lambda v: v * model.learning_rate)
        for iteration in model.estimators_
    ]
    return sum_(tree_exprs + [const(model.init_.mean)])
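The resulting expression is the usual boosting sum: the initial estimator's mean plus learning_rate times each stage's tree prediction. A sketch of the same computation (hypothetical helper; model.init_.mean is the older-sklearn attribute the snippet itself relies on):

def gb_regressor_decision(model, x):
    # init mean + learning_rate * sum of per-stage tree predictions,
    # mirroring the expression built above.
    score = model.init_.mean
    for stage in model.estimators_:
        score += model.learning_rate * stage[0].predict(x[None])[0]
    return score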
Example #10
def gradient_boosting_classifier(model, inputs, method="decision_function"):
    """
    Creates a SKAST expression corresponding to a given gradient boosting classifier.

    At the moment we only support the model's decision_function method.
    FYI: Conversion to probabilities and a prediction depends on the loss; by default
         it is done as np.exp(score - logsumexp(score, axis=1)[:, np.newaxis]).
    """

    if method != "decision_function":
        raise NotImplementedError(
            "Only decision_function is implemented for gradient boosting models so far"
        )

    tree_exprs = [
        vector([
            decision_tree(estimator.tree_,
                          inputs,
                          method="predict",
                          value_transform=lambda v: v * model.learning_rate)
            for estimator in iteration
        ]) for iteration in model.estimators_
    ]
    return sum_(tree_exprs + [const(model.init_.priors)])
Example #11
    def TensorConstant(self, node):
        return const(node.data)
"""
Multilayer perceptron
"""
from skompiler.dsl import func, const
from ..common import classifier

_activations = {
    'identity': lambda x: x,
    'tanh': lambda x: func.Sigmoid(x * 2) * 2 - 1,
    'logistic': func.Sigmoid,
    'relu': lambda x: func.Max(x, const([0] * len(x))),
    'softmax': func.Softmax
}


def mlp(model, inputs):
    actns = [model.activation] * (len(model.coefs_) - 1) + [
        model.out_activation_
    ]
    outs = inputs
    for M, b, a in zip(model.coefs_, model.intercepts_, actns):
        outs = _activations[a](const(M.T) @ outs + const(b))
    return outs


def mlp_classifier(model, inputs, method):
    out = mlp(model, inputs)
    if model.n_outputs_ == 1 and method == 'predict':
        # Binary classifier
        return func.Step(out - 0.5)
    else:
        # Multiclass / probability outputs are delegated to the shared
        # `classifier` helper imported above (assumed completion of the
        # truncated snippet).
        return classifier(out, method)
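The 'tanh' entry in the _activations table above relies on the identity tanh(x) = 2*sigmoid(2*x) - 1, which lets all activations be expressed through the Sigmoid primitive. A quick numeric check:

import numpy as np

sigmoid = lambda x: 1.0 / (1.0 + np.exp(-x))
x = np.linspace(-3, 3, 7)
assert np.allclose(np.tanh(x), 2 * sigmoid(2 * x) - 1)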
Example #13
def binarize(threshold, inputs):
    if not isinstance(inputs, list):
        inputs = decompose(inputs)
    return vector(
        [iif(inp <= const(threshold), const(0), const(1)) for inp in inputs])
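This mirrors sklearn.preprocessing.Binarizer: values at or below the threshold become 0, everything else 1. In plain NumPy:

import numpy as np

x = np.array([0.1, 0.5, 0.9])
print(np.where(x <= 0.5, 0, 1))   # [0 0 1]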
Example #14
def standard_scaler(model, inputs):
    if model.with_mean:
        inputs = inputs - const(model.mean_)
    if model.with_std:
        inputs = inputs / const(model.scale_)
    return inputs
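This is exactly StandardScaler.transform, with each step optional: subtract the fitted mean, then divide by the fitted scale. A check against scikit-learn:

import numpy as np
from sklearn.preprocessing import StandardScaler

X = np.random.rand(20, 3)
model = StandardScaler().fit(X)
x = X[0]
assert np.allclose((x - model.mean_) / model.scale_,
                   model.transform(x[None])[0])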
Example #15
def unscale(scale_, inputs):
    return inputs / const(scale_)
Example #16
def scale(scale_, min_, inputs):
    return inputs * const(scale_) + const(min_)
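This matches MinMaxScaler.transform, which stores its affine map as X * scale_ + min_:

import numpy as np
from sklearn.preprocessing import MinMaxScaler

X = np.random.rand(20, 2)
model = MinMaxScaler().fit(X)
x = X[0]
assert np.allclose(x * model.scale_ + model.min_,
                   model.transform(x[None])[0])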