Пример #1
0
def test_nested_condition():
    """Nested ifs that share one dependable condition → F# let-bound funcs."""
    # (1 if 1 == 1 else 2) + 2
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    shifted = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    condition = ast.CompExpr(ast.NumVal(1), shifted, ast.CompOpType.EQ)

    # The same condition guards both the outer and the nested if.
    nested = ast.IfExpr(condition, ast.FeatureRef(2), ast.NumVal(2))
    expr = ast.IfExpr(condition, nested, ast.NumVal(2))

    expected_code = """
let score (input : double list) =
    let func0 =
        if ((1.0) = (1.0)) then
            1.0
        else
            2.0
    let func1 =
        if ((1.0) = ((func0) + (2.0))) then
            if ((1.0) = ((func0) + (2.0))) then
                input.[2]
            else
                2.0
        else
            2.0
    func1
"""

    interpreter = FSharpInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #2
0
    def _assemble_tree(self, tree):
        """Recursively convert a LightGBM tree dict into an AST expression.

        Leaf nodes become ``ast.NumVal``; internal nodes become
        ``ast.IfExpr`` comparing the split feature against the threshold.
        """
        # Leaf node: just the constant leaf value.
        if "leaf_value" in tree:
            return ast.NumVal(tree["leaf_value"])

        threshold = ast.NumVal(tree["threshold"])
        feature_ref = ast.FeatureRef(tree["split_feature"])

        # LightGBM numerical splits are always "<="; anything else is
        # unsupported here.
        op = ast.CompOpType.from_str_op(tree["decision_type"])
        assert op == ast.CompOpType.LTE, "Unexpected comparison op"

        missing_type = tree['missing_type']

        if missing_type not in {"NaN", "None"}:
            raise ValueError(f"Unknown missing_type: {missing_type}")

        # Invert the comparison (LTE -> GT) and swap the children —
        # presumably so that missing values (for which comparisons are
        # false) fall into the "else" branch; confirm against LightGBM's
        # default_left semantics.
        reverse_condition = missing_type == "NaN" and tree["default_left"]
        reverse_condition |= missing_type == "None" and tree["threshold"] >= 0
        if reverse_condition:
            op = ast.CompOpType.GT
            true_child = tree["right_child"]
            false_child = tree["left_child"]
        else:
            true_child = tree["left_child"]
            false_child = tree["right_child"]

        return ast.IfExpr(ast.CompExpr(feature_ref, threshold, op),
                          self._assemble_tree(true_child),
                          self._assemble_tree(false_child))
Пример #3
0
def test_dependable_condition():
    """Condition depending on another if's result → F# let-bound func."""
    # ((1 if 1 == 1 else 2) + 2) >= (1 / 2)
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
let score (input : double list) =
    let func0 =
        if ((1.0) = (1.0)) then
            1.0
        else
            2.0
    let func1 =
        if (((func0) + (2.0)) >= ((1.0) / (2.0))) then
            1.0
        else
            input.[0]
    func1
"""

    utils.assert_code_equal(FSharpInterpreter().interpret(expr),
                            expected_code)
Пример #4
0
def test_dependable_condition():
    """Condition depending on another if's result → R temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
score <- function(input) {
    if ((1) == (1)) {
        var1 <- 1
    } else {
        var1 <- 2
    }
    if (((var1) + (2)) >= ((1) / (2))) {
        var0 <- 1
    } else {
        var0 <- input[1]
    }
    return(var0)
}
"""

    interpreter = RInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #5
0
def test_statsmodels_glm_negativebinomial_link_func():
    """GLM with NegativeBinomial family and the nbinom link function.

    The assembled AST mirrors the inverse link applied to the linear
    predictor: -1 / (1 * (1 - exp(0 - (0 + x0 * coef)))).
    """
    estimator = utils.StatsmodelsSklearnLikeWrapper(
        sm.GLM,
        dict(init=dict(
            family=sm.families.NegativeBinomial(sm.families.links.nbinom())),
             fit=dict(maxiter=1)))
    # Tiny 2-sample fit just to obtain a deterministic coefficient.
    estimator = estimator.fit([[1], [2]], [0.1, 0.2])

    assembler = assemblers.StatsmodelsGLMModelAssembler(estimator)
    actual = assembler.assemble()

    expected = ast.BinNumExpr(
        ast.NumVal(-1.0),
        ast.BinNumExpr(
            ast.NumVal(1.0),
            ast.BinNumExpr(
                ast.NumVal(1.0),
                ast.ExpExpr(
                    ast.BinNumExpr(
                        ast.NumVal(0.0),
                        ast.BinNumExpr(
                            ast.NumVal(0.0),
                            ast.BinNumExpr(ast.FeatureRef(0),
                                           ast.NumVal(-1.1079583217),
                                           ast.BinNumOpType.MUL),
                            ast.BinNumOpType.ADD), ast.BinNumOpType.SUB)),
                ast.BinNumOpType.SUB), ast.BinNumOpType.MUL),
        ast.BinNumOpType.DIV)

    assert utils.cmp_exprs(actual, expected)
Пример #6
0
def test_dependable_condition():
    """Condition depending on another if's result → C temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
double score(double * input) {
    double var0;
    double var1;
    if ((1.0) == (1.0)) {
        var1 = 1.0;
    } else {
        var1 = 2.0;
    }
    if (((var1) + (2.0)) >= ((1.0) / (2.0))) {
        var0 = 1.0;
    } else {
        var0 = input[0];
    }
    return var0;
}"""

    utils.assert_code_equal(CInterpreter().interpret(expr), expected_code)
Пример #7
0
def test_dependable_condition():
    """Condition depending on another if's result → Go temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
func score(input []float64) float64 {
    var var0 float64
    var var1 float64
    if (1) == (1) {
        var1 = 1
    } else {
        var1 = 2
    }
    if ((var1) + (2)) >= ((1) / (2)) {
        var0 = 1
    } else {
        var0 = input[0]
    }
    return var0
}"""

    interpreter = interpreters.GoInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #8
0
def test_dependable_condition():
    """Condition depending on another if's result → Ruby temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
def score(input)
    if (1.0) == (1.0)
        var1 = 1.0
    else
        var1 = 2.0
    end
    if ((var1) + (2.0)) >= ((1.0).fdiv(2.0))
        var0 = 1.0
    else
        var0 = input[0]
    end
    var0
end
"""

    utils.assert_code_equal(RubyInterpreter().interpret(expr), expected_code)
Пример #9
0
def test_bin_class_sigmoid_output_transform():
    """LightGBM binary classifier with a custom ``sigmoid`` coefficient.

    The raw tree score is scaled by 0.5 (the sigmoid parameter) inside
    1 / (1 + exp(0 - 0.5 * raw)); the shared sigmoid subexpression is
    marked ``to_reuse`` and the output vector is [1 - sigmoid, sigmoid].
    """
    estimator = lightgbm.LGBMClassifier(n_estimators=1, random_state=1,
                                        max_depth=1, sigmoid=0.5)
    utils.get_binary_classification_model_trainer()(estimator)

    assembler = assemblers.LightGBMModelAssembler(estimator)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.BinNumExpr(
                        # The sigmoid coefficient shows up as a multiplier
                        # on the single stump's output.
                        ast.NumVal(0.5),
                        ast.IfExpr(
                            ast.CompExpr(
                                ast.FeatureRef(23),
                                ast.NumVal(868.2),
                                ast.CompOpType.GT),
                            ast.NumVal(0.5197386243),
                            ast.NumVal(1.2474356828)),
                        ast.BinNumOpType.MUL),
                    ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB),
        sigmoid])

    assert utils.cmp_exprs(actual, expected)
Пример #10
0
def test_dependable_condition():
    """Condition depending on another if's result → PHP temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)
    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
<?php
function score(array $input) {
    $var0 = null;
    $var1 = null;
    if ((1.0) === (1.0)) {
        $var1 = 1.0;
    } else {
        $var1 = 2.0;
    }
    if ((($var1) + (2.0)) >= ((1.0) / (2.0))) {
        $var0 = 1.0;
    } else {
        $var0 = $input[0];
    }
    return $var0;
}
"""

    assert_code_equal(PhpInterpreter().interpret(expr), expected_code)
Пример #11
0
def test_dependable_condition():
    """Condition depending on another if's result → Python temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
def score(input):
    if (1) == (1):
        var1 = 1
    else:
        var1 = 2
    if ((var1) + (2)) >= ((1) / (2)):
        var0 = 1
    else:
        var0 = input[0]
    return var0
    """

    interpreter = interpreters.PythonInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #12
0
def test_contains_expr():
    """ContainsIntExpr compiles to a C# HashSet<int> membership helper.

    Bug fix: the original version only printed the interpreter output and
    asserted nothing, so the test could never fail. Compare the generated
    code against ``expected_code`` instead.
    """
    expected_code = """
using static System.Collections.Generic;
namespace ML {
    public static class Model {
        public static double Score(double[] input) {
            return Contains(var_1, input[0]);
        }
        private static double[] AddVectors(double[] v1, double[] v2) {
            double[] result = new double[v1.Length];
            for (int i = 0; i < v1.Length; ++i) {
                result[i] = v1[i] + v2[i];
            }
            return result;
        }
        private static double[] MulVectorNumber(double[] v1, double num) {
            double[] result = new double[v1.Length];
            for (int i = 0; i < v1.Length; ++i) {
                result[i] = v1[i] * num;
            }
            return result;
        }
        private static bool Contains(HashSet<int> v1, double featureRef) {
            return v1.Contains((int) featureRef);
        }
        static HashSet<int> var_1 = new HashSet<int>() { 0 };
    }
}
"""
    expr = ast.ContainsIntExpr([0], ast.FeatureRef(0))
    interpreter = CSharpInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #13
0
def test_nested_condition():
    """Nested ifs sharing one dependable condition → Python temp vars."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    shifted = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    condition = ast.CompExpr(ast.NumVal(1), shifted, ast.CompOpType.EQ)

    # The same condition guards both the outer and the nested if.
    nested = ast.IfExpr(condition, ast.FeatureRef(2), ast.NumVal(2))
    expr = ast.IfExpr(condition, nested, ast.NumVal(2))

    expected_code = """
def score(input):
    if (1) == (1):
        var1 = 1
    else:
        var1 = 2
    if (1) == ((var1) + (2)):
        if (1) == (1):
            var2 = 1
        else:
            var2 = 2
        if (1) == ((var2) + (2)):
            var0 = input[2]
        else:
            var0 = 2
    else:
        var0 = 2
    return var0
    """

    interpreter = interpreters.PythonInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #14
0
def test_dependable_condition():
    """Condition depending on another if's result → VB temp variable.

    NOTE(review): the expected VB code uses ``==`` for equality, which is
    not valid Visual Basic syntax (VB uses ``=``) — confirm against the
    interpreter's actual output.
    """
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
Module Model
Function score(ByRef input_vector() As Double) As Double
    Dim var0 As Double
    Dim var1 As Double
    If (1) == (1) Then
        var1 = 1
    Else
        var1 = 2
    End If
    If ((var1) + (2)) >= ((1) / (2)) Then
        var0 = 1
    Else
        var0 = input_vector(0)
    End If
    score = var0
End Function
End Module
"""

    interpreter = VisualBasicInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #15
0
def test_dependable_condition():
    """Condition depending on another if's result → Rust temp variable."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)
    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
fn score(input: Vec<f64>) -> f64 {
    let var0: f64;
    let var1: f64;
    if (1.0_f64) == (1.0_f64) {
        var1 = 1.0_f64;
    } else {
        var1 = 2.0_f64;
    }
    if ((var1) + (2.0_f64)) >= ((1.0_f64) / (2.0_f64)) {
        var0 = 1.0_f64;
    } else {
        var0 = input[0];
    }
    var0
}
"""

    assert_code_equal(RustInterpreter().interpret(expr), expected_code)
Пример #16
0
def test_dependable_condition():
    """Condition depending on another if's result → Haskell where-binding."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
module Model where
score :: [Double] -> Double
score input =
    func1
    where
        func0 =
            if ((1.0) == (1.0)) then
                1.0
            else
                2.0
        func1 =
            if (((func0) + (2.0)) >= ((1.0) / (2.0))) then
                1.0
            else
                (input) !! (0)
"""

    interpreter = HaskellInterpreter()
    utils.assert_code_equal(interpreter.interpret(expr), expected_code)
Пример #17
0
    def _assemble_tree(self, tree):
        """Recursively convert an XGBoost tree dict into an AST expression.

        Leaf nodes become ``ast.NumVal``; internal nodes become
        ``ast.IfExpr`` built so that missing values always fall into the
        "else" branch.
        """
        if "leaf" in tree:
            return ast.NumVal(tree["leaf"])

        # XGBoost stores thresholds as float32; keep that dtype to match.
        threshold = ast.NumVal(tree["split_condition"], dtype=np.float32)
        split = tree["split"]
        # "split" may be a feature name or already a numeric index.
        feature_idx = self._feature_name_to_idx.get(split, split)
        feature_ref = ast.FeatureRef(feature_idx)

        # Since comparison with NaN (missing) value always returns false we
        # should make sure that the node ID specified in the "missing" field
        # always ends up in the "else" branch of the ast.IfExpr.
        use_lt_comp = tree["missing"] == tree["no"]
        if use_lt_comp:
            comp_op = ast.CompOpType.LT
            true_child_id = tree["yes"]
            false_child_id = tree["no"]
        else:
            comp_op = ast.CompOpType.GTE
            true_child_id = tree["no"]
            false_child_id = tree["yes"]

        return ast.IfExpr(ast.CompExpr(feature_ref, threshold, comp_op),
                          self._assemble_child_tree(tree, true_child_id),
                          self._assemble_child_tree(tree, false_child_id))
Пример #18
0
def test_dependable_condition():
    """Condition depending on another if's result → Powershell temp var."""
    inner_if = ast.IfExpr(
        ast.CompExpr(ast.NumVal(1), ast.NumVal(1), ast.CompOpType.EQ),
        ast.NumVal(1),
        ast.NumVal(2))
    lhs = ast.BinNumExpr(inner_if, ast.NumVal(2), ast.BinNumOpType.ADD)
    rhs = ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.DIV)
    condition = ast.CompExpr(lhs, rhs, ast.CompOpType.GTE)

    expr = ast.IfExpr(condition, ast.NumVal(1), ast.FeatureRef(0))

    expected_code = """
function Score([double[]] $InputVector) {
    [double]$var0 = 0
    [double]$var1 = 0
    if ((1) -eq (1)) {
        $var1 = 1
    } else {
        $var1 = 2
    }
    if ((($var1) + (2)) -ge ((1) / (2))) {
        $var0 = 1
    } else {
        $var0 = $InputVector[0]
    }
    return $var0
}
"""

    utils.assert_code_equal(PowershellInterpreter().interpret(expr),
                            expected_code)
Пример #19
0
def test_binary_classification():
    """LightGBM binary classifier with two depth-1 trees.

    The raw score is the sum of both stumps' leaf values; the shared
    sigmoid expression 1 / (1 + exp(0 - raw)) is marked ``to_reuse`` and
    the expected output vector is [1 - sigmoid, sigmoid].
    """
    estimator = lightgbm.LGBMClassifier(n_estimators=2, random_state=1,
                                        max_depth=1)
    utils.train_model_classification_binary(estimator)

    assembler = assemblers.LightGBMModelAssembler(estimator)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                ast.NumVal(0),
                                # First stump: split on feature 23.
                                ast.SubroutineExpr(
                                    ast.IfExpr(
                                        ast.CompExpr(
                                            ast.FeatureRef(23),
                                            ast.NumVal(868.2000000000002),
                                            ast.CompOpType.GT),
                                        ast.NumVal(0.25986931215073095),
                                        ast.NumVal(0.6237178414050242))),
                                ast.BinNumOpType.ADD),
                            # Second stump: split on feature 7.
                            ast.SubroutineExpr(
                                ast.IfExpr(
                                    ast.CompExpr(
                                        ast.FeatureRef(7),
                                        ast.NumVal(0.05142),
                                        ast.CompOpType.GT),
                                    ast.NumVal(-0.1909605544006228),
                                    ast.NumVal(0.1293965108676673))),
                            ast.BinNumOpType.ADD)),
                    ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB),
        sigmoid])

    assert utils.cmp_exprs(actual, expected)
Пример #20
0
def test_two_features():
    """Linear regression with two coefficients: 3 + 1*x0 + 2*x1."""
    estimator = linear_model.LinearRegression()
    estimator.coef_ = [1, 2]
    estimator.intercept_ = 3

    actual = assemblers.SklearnLinearModelAssembler(estimator).assemble()

    term0 = ast.BinNumExpr(
        ast.FeatureRef(0), ast.NumVal(1), ast.BinNumOpType.MUL)
    term1 = ast.BinNumExpr(
        ast.FeatureRef(1), ast.NumVal(2), ast.BinNumOpType.MUL)
    expected = ast.BinNumExpr(
        ast.BinNumExpr(ast.NumVal(3), term0, ast.BinNumOpType.ADD),
        term1,
        ast.BinNumOpType.ADD)

    assert utils.cmp_exprs(actual, expected)
Пример #21
0
def _rbf_kernel_ast(estimator, sup_vec_value, to_reuse=False):
    """Build the AST of an RBF kernel term: exp(-gamma * (sv - x0) ** 2)."""
    diff = ast.BinNumExpr(
        ast.NumVal(sup_vec_value), ast.FeatureRef(0), ast.BinNumOpType.SUB)
    squared = ast.PowExpr(diff, ast.NumVal(2))
    scaled = ast.BinNumExpr(
        ast.NumVal(-estimator.gamma), squared, ast.BinNumOpType.MUL)
    return ast.ExpExpr(scaled, to_reuse=to_reuse)
Пример #22
0
def test_binary_class():
    """Binary logistic regression assembles to 3 + 1*x0 + 2*x1."""
    estimator = linear_model.LogisticRegression()
    estimator.coef_ = np.array([[1, 2]])
    estimator.intercept_ = np.array([3])

    actual = assemblers.SklearnLinearModelAssembler(estimator).assemble()

    term0 = ast.BinNumExpr(
        ast.FeatureRef(0), ast.NumVal(1), ast.BinNumOpType.MUL)
    term1 = ast.BinNumExpr(
        ast.FeatureRef(1), ast.NumVal(2), ast.BinNumOpType.MUL)
    expected = ast.BinNumExpr(
        ast.BinNumExpr(ast.NumVal(3), term0, ast.BinNumOpType.ADD),
        term1,
        ast.BinNumOpType.ADD)

    assert utils.cmp_exprs(actual, expected)
Пример #23
0
def _linear_to_ast(coef, intercept):
    """Build ``intercept + sum_i(x_i * coef_i)`` as an AST expression."""
    products = [
        utils.mul(ast.FeatureRef(i), ast.NumVal(weight))
        for i, weight in enumerate(coef)
    ]
    return utils.apply_op_to_expressions(
        ast.BinNumOpType.ADD, ast.NumVal(intercept), *products)
Пример #24
0
def test_leaves_cutoff_threshold():
    """XGBoost binary classifier assembled with ``leaves_cutoff_threshold=1``.

    With the cutoff at 1 leaf, each tree's if-expression is wrapped in its
    own ``ast.SubroutineExpr``. The shared sigmoid 1 / (1 + exp(0 - raw))
    is marked ``to_reuse`` and the output vector is [1 - sigmoid, sigmoid].
    """
    estimator = xgboost.XGBClassifier(n_estimators=2,
                                      random_state=1,
                                      max_depth=1)
    utils.train_model_classification_binary(estimator)

    assembler = assemblers.XGBoostModelAssembler(estimator,
                                                 leaves_cutoff_threshold=1)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                # -0.0 is the model's base score term.
                                ast.NumVal(-0.0),
                                ast.SubroutineExpr(
                                    ast.IfExpr(
                                        ast.CompExpr(ast.FeatureRef(20),
                                                     ast.NumVal(16.7950001),
                                                     ast.CompOpType.GTE),
                                        ast.NumVal(-0.17062147),
                                        ast.NumVal(0.1638484))),
                                ast.BinNumOpType.ADD),
                            ast.SubroutineExpr(
                                ast.IfExpr(
                                    ast.CompExpr(ast.FeatureRef(27),
                                                 ast.NumVal(0.142349988),
                                                 ast.CompOpType.GTE),
                                    ast.NumVal(-0.16087772),
                                    ast.NumVal(0.149866998))),
                            ast.BinNumOpType.ADD)), ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB), sigmoid
    ])

    assert utils.cmp_exprs(actual, expected)
Пример #25
0
 def _cosine_kernel(self, support_vector):
     """Build the AST of a cosine-similarity kernel for one support vector.

     The support vector is normalized eagerly in numpy, while the input
     feature vector's norm is computed in the generated code; both norms
     are guarded against zero to avoid division by zero.
     """
     support_vector_norm = np.linalg.norm(support_vector)
     # A zero-norm support vector would divide by zero; substitute 1.0.
     if support_vector_norm == 0.0:
         support_vector_norm = 1.0
     # sqrt(sum(x_i * x_i)) over all input features; reused in the output.
     feature_norm = ast.SqrtExpr(
         utils.apply_op_to_expressions(
             ast.BinNumOpType.ADD,
             *[utils.mul(ast.FeatureRef(i), ast.FeatureRef(i))
               for i in range(len(support_vector))]),
         to_reuse=True)
     # Same zero-norm guard, but evaluated at scoring time.
     safe_feature_norm = ast.IfExpr(
         utils.eq(feature_norm, ast.NumVal(0.0)),
         ast.NumVal(1.0),
         feature_norm)
     kernel = self._linear_kernel(support_vector / support_vector_norm)
     kernel = utils.div(kernel, safe_feature_norm)
     return kernel
def test_two_conditions():
    """A 3-sample regression tree produces two chained threshold splits."""
    estimator = tree.DecisionTreeRegressor()
    estimator.fit([[1], [2], [3]], [1, 2, 3])

    actual = assemblers.TreeModelAssembler(estimator).assemble()

    # else-branch: second split at 2.5 chooses between leaves 2.0 and 3.0.
    second_split = ast.IfExpr(
        ast.CompExpr(ast.FeatureRef(0), ast.NumVal(2.5), ast.CompOpType.LTE),
        ast.NumVal(2.0),
        ast.NumVal(3.0))
    expected = ast.IfExpr(
        ast.CompExpr(ast.FeatureRef(0), ast.NumVal(1.5), ast.CompOpType.LTE),
        ast.NumVal(1.0),
        second_split)

    assert utils.cmp_exprs(actual, expected)
Пример #27
0
def test_binary_classification():
    """XGBoost binary classifier with two depth-1 trees.

    The raw score is the base score (-0.0) plus both stumps' leaf values;
    the shared sigmoid 1 / (1 + exp(0 - raw)) is marked ``to_reuse`` and
    the expected output vector is [1 - sigmoid, sigmoid].
    """
    estimator = xgboost.XGBClassifier(n_estimators=2,
                                      random_state=1,
                                      max_depth=1)
    utils.train_model_classification_binary(estimator)

    assembler = assemblers.XGBoostModelAssembler(estimator)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                ast.NumVal(-0.0),
                                ast.SubroutineExpr(
                                    ast.IfExpr(
                                        ast.CompExpr(ast.FeatureRef(20),
                                                     ast.NumVal(16.7950001),
                                                     ast.CompOpType.GTE),
                                        ast.NumVal(-0.173057005),
                                        ast.NumVal(0.163440868))),
                                ast.BinNumOpType.ADD),
                            ast.SubroutineExpr(
                                ast.IfExpr(
                                    ast.CompExpr(ast.FeatureRef(27),
                                                 ast.NumVal(0.142349988),
                                                 ast.CompOpType.GTE),
                                    ast.NumVal(-0.161026895),
                                    ast.NumVal(0.149405137))),
                            ast.BinNumOpType.ADD)), ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB), sigmoid
    ])

    assert utils.cmp_exprs(actual, expected)
Пример #28
0
def test_binary_classification():
    """LightGBM binary classifier with two depth-1 trees (no subroutines
    around individual trees in this assembler version).

    The raw score is the sum of both stumps' leaf values; the shared
    sigmoid 1 / (1 + exp(0 - raw)) is marked ``to_reuse`` and the expected
    output vector is [1 - sigmoid, sigmoid].
    """
    estimator = lightgbm.LGBMClassifier(n_estimators=2, random_state=1,
                                        max_depth=1)
    utils.train_model_classification_binary(estimator)

    assembler = assemblers.LightGBMModelAssembler(estimator)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                ast.NumVal(0),
                                ast.IfExpr(
                                    ast.CompExpr(
                                        ast.FeatureRef(23),
                                        ast.NumVal(868.2000000000002),
                                        ast.CompOpType.GT),
                                    ast.NumVal(0.2762557140263451),
                                    ast.NumVal(0.6399134166614473)),
                                ast.BinNumOpType.ADD),
                            ast.IfExpr(
                                ast.CompExpr(
                                    ast.FeatureRef(27),
                                    ast.NumVal(0.14205000000000004),
                                    ast.CompOpType.GT),
                                ast.NumVal(-0.2139321843285849),
                                ast.NumVal(0.1151466338793227)),
                            ast.BinNumOpType.ADD)),
                    ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB),
        sigmoid])

    assert utils.cmp_exprs(actual, expected)
Пример #29
0
def test_regression():
    """LightGBM regressor with two depth-1 trees sums the two stumps."""
    estimator = lgb.LGBMRegressor(n_estimators=2, random_state=1, max_depth=1)
    utils.get_regression_model_trainer()(estimator)

    actual = LightGBMModelAssembler(estimator).assemble()

    first_tree = ast.IfExpr(
        ast.CompExpr(ast.FeatureRef(12), ast.NumVal(9.725),
                     ast.CompOpType.GT),
        ast.NumVal(22.030283219508686),
        ast.NumVal(23.27840740210207))
    second_tree = ast.IfExpr(
        ast.CompExpr(ast.FeatureRef(5), ast.NumVal(6.8375),
                     ast.CompOpType.GT),
        ast.NumVal(1.2777791671888081),
        ast.NumVal(-0.2686772850549309))
    expected = ast.BinNumExpr(first_tree, second_tree, ast.BinNumOpType.ADD)

    assert utils.cmp_exprs(actual, expected)
Пример #30
0
 def kernel_ast(sup_vec_value):
     """AST of a polynomial kernel term:
     (gamma * (sup_vec_value * x0) + 0.0) ** degree.

     NOTE(review): relies on ``estimator`` captured from the enclosing
     scope; assumes a single input feature (FeatureRef(0)) and a zero
     independent term (the literal 0.0, presumably coef0) — confirm.
     """
     return ast.PowExpr(
         ast.BinNumExpr(
             ast.BinNumExpr(
                 ast.NumVal(estimator.gamma),
                 ast.BinNumExpr(ast.NumVal(sup_vec_value),
                                ast.FeatureRef(0), ast.BinNumOpType.MUL),
                 ast.BinNumOpType.MUL), ast.NumVal(0.0),
             ast.BinNumOpType.ADD), ast.NumVal(estimator.degree))