How to use the m2cgen.ast.SubroutineExpr function in m2cgen

To help you get started, we’ve selected a few m2cgen examples based on popular ways the library is used in public projects.
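
As a quick orientation before the project snippets, here is a minimal sketch of the pattern they all share, assuming an m2cgen release that still ships ast.SubroutineExpr and whose interpreters expose an interpret() method (as the tests below suggest): wrapping a sub-expression in SubroutineExpr lets an interpreter emit it as a separate function.

from m2cgen import ast, interpreters

# Multiply feature 0 by (1 + 2); the addition is wrapped in a SubroutineExpr
# so the interpreter is free to emit it as its own function.
expr = ast.BinNumExpr(
    ast.FeatureRef(0),
    ast.SubroutineExpr(
        ast.BinNumExpr(ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.ADD)),
    ast.BinNumOpType.MUL)

print(interpreters.JavaInterpreter().interpret(expr))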

From BayesWitnesses/m2cgen on GitHub, tests/assemblers/test_xgboost.py:
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                ast.NumVal(-0.0),
                                ast.SubroutineExpr(
                                    ast.IfExpr(
                                        ast.CompExpr(
                                            ast.FeatureRef(20),
                                            ast.NumVal(16.7950001),
                                            ast.CompOpType.GTE),
                                        ast.NumVal(-0.17062147),
                                        ast.NumVal(0.1638484))),
                                ast.BinNumOpType.ADD),
                            ast.SubroutineExpr(
                                ast.IfExpr(
                                    ast.CompExpr(
                                        ast.FeatureRef(27),
                                        ast.NumVal(0.142349988),
                                        ast.CompOpType.GTE),
                                    ast.NumVal(-0.16087772),
                                    ast.NumVal(0.149866998))),
                            ast.BinNumOpType.ADD)),
                    ast.BinNumOpType.SUB)),
            ast.BinNumOpType.ADD),
        ast.BinNumOpType.DIV,
        to_reuse=True)

    expected = ast.VectorVal([
        ast.BinNumExpr(ast.NumVal(1), sigmoid, ast.BinNumOpType.SUB),
        sigmoid])
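
The expected AST above also shows the to_reuse=True idiom: the sigmoid expression is built once, flagged as reusable, and the very same Python object is referenced in both components of the VectorVal. Here is a minimal sketch of that pattern, on the assumption that to_reuse is a hint allowing interpreters to cache the computed value instead of re-emitting the whole expression:

from m2cgen import ast

# Build the shared sub-expression once and mark it as reusable.
shared = ast.BinNumExpr(
    ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.ADD, to_reuse=True)

# Reference the same object in both output components, mirroring how
# `sigmoid` is reused in the test above.
output = ast.VectorVal([
    ast.BinNumExpr(ast.NumVal(1), shared, ast.BinNumOpType.SUB),
    shared])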

From BayesWitnesses/m2cgen on GitHub, tests/assemblers/test_lightgbm.py:
def test_regression():
    estimator = lightgbm.LGBMRegressor(n_estimators=2, random_state=1,
                                       max_depth=1)
    utils.train_model_regression(estimator)

    assembler = assemblers.LightGBMModelAssembler(estimator)
    actual = assembler.assemble()

    expected = ast.SubroutineExpr(
        ast.BinNumExpr(
            ast.BinNumExpr(
                ast.NumVal(0),
                ast.IfExpr(
                    ast.CompExpr(
                        ast.FeatureRef(5),
                        ast.NumVal(6.8455),
                        ast.CompOpType.GT),
                    ast.NumVal(24.007392728914056),
                    ast.NumVal(22.35695742616179)),
                ast.BinNumOpType.ADD),
            ast.IfExpr(
                ast.CompExpr(
                    ast.FeatureRef(12),
                    ast.NumVal(9.63),
                    ast.CompOpType.GT),

From BayesWitnesses/m2cgen on GitHub, tests/assemblers/test_xgboost.py:
    estimator = xgboost.XGBClassifier(n_estimators=2, random_state=1,
                                      max_depth=1)
    utils.train_model_classification_binary(estimator)

    assembler = assemblers.XGBoostModelAssembler(estimator,
                                                 leaves_cutoff_threshold=1)
    actual = assembler.assemble()

    sigmoid = ast.BinNumExpr(
        ast.NumVal(1),
        ast.BinNumExpr(
            ast.NumVal(1),
            ast.ExpExpr(
                ast.BinNumExpr(
                    ast.NumVal(0),
                    ast.SubroutineExpr(
                        ast.BinNumExpr(
                            ast.BinNumExpr(
                                ast.NumVal(-0.0),
                                ast.SubroutineExpr(
                                    ast.IfExpr(
                                        ast.CompExpr(
                                            ast.FeatureRef(20),
                                            ast.NumVal(16.7950001),
                                            ast.CompOpType.GTE),
                                        ast.NumVal(-0.17062147),
                                        ast.NumVal(0.1638484))),
                                ast.BinNumOpType.ADD),
                            ast.SubroutineExpr(
                                ast.IfExpr(
                                    ast.CompExpr(
                                        ast.FeatureRef(27),

From BayesWitnesses/m2cgen on GitHub, tests/interpreters/test_java.py:
def test_subroutine():
    expr = ast.BinNumExpr(
        ast.FeatureRef(0),
        ast.SubroutineExpr(
            ast.BinNumExpr(
                ast.NumVal(1), ast.NumVal(2), ast.BinNumOpType.ADD)),
        ast.BinNumOpType.MUL)

    expected_code = """
public class Model {

    public static double score(double[] input) {
        return (input[0]) * (subroutine0(input));
    }
    public static double subroutine0(double[] input) {
        return (1) + (2);
    }
}"""

    interpreter = interpreters.JavaInterpreter()

From BayesWitnesses/m2cgen on GitHub, tests/assemblers/test_svm.py:
def _rbf_kernel_ast(estimator, sup_vec_value, to_reuse=False):
    negative_gamma_ast = ast.BinNumExpr(
        ast.NumVal(0),
        ast.NumVal(estimator.gamma),
        ast.BinNumOpType.SUB,
        to_reuse=True)

    return ast.SubroutineExpr(
        ast.ExpExpr(
            ast.BinNumExpr(
                negative_gamma_ast,
                ast.PowExpr(
                    ast.BinNumExpr(
                        ast.NumVal(sup_vec_value),
                        ast.FeatureRef(0),
                        ast.BinNumOpType.SUB),
                    ast.NumVal(2)),
                ast.BinNumOpType.MUL)),
        to_reuse=to_reuse)

From BayesWitnesses/m2cgen on GitHub, m2cgen/assemblers/boosting.py:
        to_sum = trees_ast

        # In a large tree we need to generate multiple subroutines to avoid
        # java limitations https://github.com/BayesWitnesses/m2cgen/issues/103.
        trees_num_leaves = [self._count_leaves(t) for t in trees]
        if sum(trees_num_leaves) > self._leaves_cutoff_threshold:
            to_sum = self._split_into_subroutines(trees_ast, trees_num_leaves)

        tmp_ast = utils.apply_op_to_expressions(
            ast.BinNumOpType.ADD,
            ast.NumVal(base_score),
            *to_sum)

        result_ast = self._final_transform(tmp_ast)

        return ast.SubroutineExpr(result_ast)
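
The comment in this snippet explains why the boosting assembler produces subroutines at all: large ensembles are split into several of them so the generated Java does not hit method-size limits. Below is a rough, hypothetical sketch of that chunking idea; split_into_subroutines, the cutoff handling, and the reduce-based summation are illustrative stand-ins, not m2cgen's actual _split_into_subroutines.

from functools import reduce

from m2cgen import ast


def _sum(exprs):
    # Fold a list of expressions into nested additions.
    return reduce(
        lambda left, right: ast.BinNumExpr(left, right, ast.BinNumOpType.ADD),
        exprs)


def split_into_subroutines(trees_ast, trees_num_leaves, cutoff):
    # Group per-tree expressions into chunks whose combined leaf count stays
    # under the cutoff and wrap each chunk's sum in its own SubroutineExpr.
    subroutines, chunk, chunk_leaves = [], [], 0
    for expr, n_leaves in zip(trees_ast, trees_num_leaves):
        if chunk and chunk_leaves + n_leaves > cutoff:
            subroutines.append(ast.SubroutineExpr(_sum(chunk)))
            chunk, chunk_leaves = [], 0
        chunk.append(expr)
        chunk_leaves += n_leaves
    if chunk:
        subroutines.append(ast.SubroutineExpr(_sum(chunk)))
    return subroutines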

From BayesWitnesses/m2cgen on GitHub, m2cgen/assemblers/svm.py:
def _apply_kernel(self, support_vectors, to_reuse=False):
        kernel_exprs = []
        for v in support_vectors:
            kernel = self._kernel_fun(v)
            kernel_exprs.append(ast.SubroutineExpr(kernel, to_reuse=to_reuse))
        return kernel_exprs

From BayesWitnesses/m2cgen on GitHub, m2cgen/assemblers/linear.py:
def _build_ast(self):
        coef = utils.to_2d_array(self.model.coef_)
        intercept = utils.to_1d_array(self.model.intercept_)

        if coef.shape[0] == 1:
            return _linear_to_ast(coef[0], intercept[0])

        exprs = []
        for idx in range(coef.shape[0]):
            exprs.append(ast.SubroutineExpr(
                _linear_to_ast(coef[idx], intercept[idx])))
        return ast.VectorVal(exprs)
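
_linear_to_ast itself is not shown above; per class it essentially produces a weighted sum of features plus the intercept. The sketch below builds such an expression by hand with made-up coefficients, to illustrate the shape of the AST rather than m2cgen's actual helper:

from functools import reduce

from m2cgen import ast


def linear_to_ast_sketch(coef, intercept):
    # intercept + coef[0] * x[0] + coef[1] * x[1] + ...
    terms = [ast.NumVal(intercept)]
    terms += [
        ast.BinNumExpr(ast.NumVal(c), ast.FeatureRef(i), ast.BinNumOpType.MUL)
        for i, c in enumerate(coef)]
    return reduce(
        lambda left, right: ast.BinNumExpr(left, right, ast.BinNumOpType.ADD),
        terms)


# Two made-up classes, wrapped the same way as in _build_ast above.
multiclass = ast.VectorVal([
    ast.SubroutineExpr(linear_to_ast_sketch([0.5, -1.25], 0.1)),
    ast.SubroutineExpr(linear_to_ast_sketch([-0.5, 1.25], -0.1))])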